commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
db00a30c62b2a2703bb4531059b4f9d10f86496a | Adjust curly parser tests to reflect changes. | vyos-legacy/vyconfd,vyos-legacy/vyconfd | tests/integration/curly_parser/curly_parser_test.py | tests/integration/curly_parser/curly_parser_test.py | #!/usr/bin/env python
#
# curly_parser_test.py: tests for vyconf.configfile.curly.Parser
# Copyright (C) 2014 VyOS Development Group <maintainers@vyos.net>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
# USA
import vyconf.configfile.curly
import unittest
# Comparing to a complete datastructure is sort of ugly, but I don't see
# a better way to make sure it works correctly yet.
class TestCurlyParser(unittest.TestCase):
def setUp(self):
self.parser = vyconf.configfile.curly.Parser()
def test_empty_config(self):
self.assertRaises(vyconf.configfile.curly.ParseError, self.parser.parse, "")
def test_single_empty_node(self):
parser = vyconf.configfile.curly.Parser()
result = [('node', {'comment': None, 'content': None, 'name': ['foo']})]
ast = parser.parse("foo { }",positiontracking=False)
self.assertEqual(ast, result)
def test_single_node_with_comment(self):
result = [('node', {'comment': 'Foo', 'content': None, 'name': ['foo']})]
ast = self.parser.parse("/* Foo */ foo {}",positiontracking=True)
self.assertEqual(ast, result)
def test_single_non_empty_node(self):
result = [('node', {'comment': None, 'content':
[('leaf-node', {'comment': None, 'name': ['bar'], 'value': 0})], 'name': ['foo']})]
ast = self.parser.parse("foo { bar 0; }")
self.assertEqual(ast, result)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
#
# curly_parser_test.py: tests for vyconf.configfile.curly.Parser
# Copyright (C) 2014 VyOS Development Group <maintainers@vyos.net>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
# USA
import vyconf.configfile.curly
import unittest
# Comparing to a complete datastructure is sort of ugly, but I don't see
# a better way to make sure it works correctly yet.
class TestCurlyParser(unittest.TestCase):
def setUp(self):
self.parser = vyconf.configfile.curly.Parser()
def test_empty_config(self):
self.assertRaises(vyconf.configfile.curly.ParseError, self.parser.parse, "")
def test_single_empty_node(self):
parser = vyconf.configfile.curly.Parser()
result = [('node', {'comment': None, 'content': [], 'name': 'foo'})]
ast = parser.parse("foo { }",positiontracking=False)
self.assertEqual(ast, result)
def test_single_node_with_comment(self):
result = [('node', {'comment': 'Foo', 'content': [], 'name': 'foo'})]
ast = self.parser.parse("/* Foo */ foo {}",positiontracking=True)
self.assertEqual(ast, result)
def test_single_non_empty_node(self):
result = [('node', {'comment': None, 'content':
[('leaf-node', {'comment': None, 'name': 'bar', 'value': '0'})], 'name': 'foo'})]
ast = self.parser.parse("foo { bar 0; }")
self.assertEqual(ast, result)
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | Python |
68e58114919208b69a01880f52e8b8e2918a4edb | make failing ogr/shape comparison a todo | yiqingj/work,rouault/mapnik,rouault/mapnik,mbrukman/mapnik,mbrukman/mapnik,tomhughes/python-mapnik,mapnik/mapnik,mapnik/python-mapnik,cjmayo/mapnik,stefanklug/mapnik,kapouer/mapnik,lightmare/mapnik,naturalatlas/mapnik,jwomeara/mapnik,pnorman/mapnik,tomhughes/mapnik,qianwenming/mapnik,yiqingj/work,cjmayo/mapnik,mapycz/python-mapnik,Airphrame/mapnik,pramsey/mapnik,garnertb/python-mapnik,naturalatlas/mapnik,Mappy/mapnik,stefanklug/mapnik,mapnik/python-mapnik,CartoDB/mapnik,manz/python-mapnik,kapouer/mapnik,mapycz/mapnik,mapnik/mapnik,mbrukman/mapnik,tomhughes/python-mapnik,mapycz/mapnik,jwomeara/mapnik,pnorman/mapnik,Uli1/mapnik,zerebubuth/mapnik,whuaegeanse/mapnik,yohanboniface/python-mapnik,qianwenming/mapnik,whuaegeanse/mapnik,mapycz/mapnik,Mappy/mapnik,kapouer/mapnik,whuaegeanse/mapnik,Mappy/mapnik,Airphrame/mapnik,zerebubuth/mapnik,Airphrame/mapnik,garnertb/python-mapnik,pramsey/mapnik,lightmare/mapnik,manz/python-mapnik,davenquinn/python-mapnik,pramsey/mapnik,strk/mapnik,zerebubuth/mapnik,jwomeara/mapnik,tomhughes/python-mapnik,naturalatlas/mapnik,mapnik/mapnik,pnorman/mapnik,Uli1/mapnik,whuaegeanse/mapnik,rouault/mapnik,yiqingj/work,stefanklug/mapnik,CartoDB/mapnik,cjmayo/mapnik,strk/mapnik,strk/mapnik,strk/mapnik,sebastic/python-mapnik,mapnik/mapnik,cjmayo/mapnik,tomhughes/mapnik,davenquinn/python-mapnik,lightmare/mapnik,sebastic/python-mapnik,tomhughes/mapnik,naturalatlas/mapnik,qianwenming/mapnik,stefanklug/mapnik,tomhughes/mapnik,qianwenming/mapnik,Uli1/mapnik,davenquinn/python-mapnik,jwomeara/mapnik,sebastic/python-mapnik,rouault/mapnik,lightmare/mapnik,kapouer/mapnik,mapnik/python-mapnik,pramsey/mapnik,qianwenming/mapnik,mapycz/python-mapnik,Mappy/mapnik,manz/python-mapnik,yiqingj/work,yohanboniface/python-mapnik,Uli1/mapnik,garnertb/python-mapnik,Airphrame/mapnik,CartoDB/mapnik,yohanboniface/python-mapnik,mbrukman/mapnik,pnorman/mapnik | 
tests/python_tests/ogr_and_shape_geometries_test.py | tests/python_tests/ogr_and_shape_geometries_test.py | #!/usr/bin/env python
from nose.tools import *
from utilities import execution_path, Todo
import os, sys, glob, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
# TODO - fix truncation in shapefile...
polys = ["POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))",
"POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))",
"MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))"
"MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 45 20, 30 5, 10 10, 10 30, 20 35),(30 20, 20 25, 20 15, 30 20)))"
]
plugins = mapnik.DatasourceCache.instance().plugin_names()
if 'shape' in plugins and 'ogr' in plugins:
def test_geometries_are_interpreted_equivalently():
shapefile = '../data/shp/wkt_poly.shp'
ds1 = mapnik.Ogr(file=shapefile,layer_by_index=0)
ds2 = mapnik.Shapefile(file=shapefile)
fs1 = ds1.featureset()
fs2 = ds2.featureset()
raise Todo("output will differ between ogr and shape, may not matter, needs a closer look")
count = 0;
while(True):
count += 1
feat1 = fs1.next()
feat2 = fs2.next()
if not feat1:
break
#import pdb;pdb.set_trace()
#print feat1
eq_(str(feat1),str(feat2))
eq_(feat1.geometries().to_wkt(),feat2.geometries().to_wkt())
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run]
| #!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, sys, glob, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
# TODO - fix truncation in shapefile...
polys = ["POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))",
"POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))",
"MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))"
"MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 45 20, 30 5, 10 10, 10 30, 20 35),(30 20, 20 25, 20 15, 30 20)))"
]
plugins = mapnik.DatasourceCache.instance().plugin_names()
if 'shape' in plugins and 'ogr' in plugins:
def test_geometries_are_interpreted_equivalently():
shapefile = '../data/shp/wkt_poly.shp'
ds1 = mapnik.Ogr(file=shapefile,layer_by_index=0)
ds2 = mapnik.Shapefile(file=shapefile)
fs1 = ds1.featureset()
fs2 = ds2.featureset()
count = 0;
while(True):
count += 1
feat1 = fs1.next()
feat2 = fs2.next()
if not feat1:
break
#import pdb;pdb.set_trace()
#print feat1
eq_(str(feat1),str(feat2))
eq_(feat1.geometries().to_wkt(),feat2.geometries().to_wkt())
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run]
| lgpl-2.1 | Python |
8e57bac9ca41bfcccfabc8524ddc2a8730ac4609 | Update quality_score_filter.py | amojarro/carrierseq,amojarro/carrierseq | python/quality_score_filter.py | python/quality_score_filter.py | from Bio import SeqIO
import math
from Tkinter import Tk
import sys
name = sys.argv[1]
qs = float(sys.argv[3])
output = sys.argv[2]
count = 0
for rec in SeqIO.parse(name, "fastq"):
count += 1
qual_sequences = []
cnt = 0
for rec in SeqIO.parse(name, "fastq"):
rec.letter_annotations["phred_quality"]
probs = []
for q in rec.letter_annotations["phred_quality"]:
e = float(math.pow(10.0,-1*(float(q)/10.0)))
# print q, e
probs.append(e)
av_prob = float(sum(probs))/float(len((rec.letter_annotations["phred_quality"])))
# print av_prob
av_q = float(-10.0*(math.log10(float(av_prob))))
# print av_prob, av_q
if av_q >= qs:
cnt += 1
qual_sequences.append(rec)
output_handle = open(output +'.fa', "w")
SeqIO.write(qual_sequences, output_handle, "fasta")
output_handle.close()
output_handle = open(output +'.fq', "w")
SeqIO.write(qual_sequences, output_handle, "fastq")
output_handle.close()
| from Bio import SeqIO
import math
from Tkinter import Tk
import sys
name = sys.argv[1]
qs = float(sys.argv[3])
output = sys.argv[2]
count = 0
for rec in SeqIO.parse(name, "fastq"):
count += 1
print("%i reads in fastq file" % count)
qual_sequences = [] # Setup an empty list
cnt = 0
for rec in SeqIO.parse(name, "fastq"):
rec.letter_annotations["phred_quality"]
probs = []
for q in rec.letter_annotations["phred_quality"]:
e = float(math.pow(10.0,-1*(float(q)/10.0)))
# print q, e
probs.append(e)
av_prob = float(sum(probs))/float(len((rec.letter_annotations["phred_quality"])))
# print av_prob
av_q = float(-10.0*(math.log10(float(av_prob))))
# print av_prob, av_q
if av_q >= qs:
cnt += 1
qual_sequences.append(rec)
print cnt,'Quality reads saved'
output_handle = open(output +'.fa', "w")
SeqIO.write(qual_sequences, output_handle, "fasta")
output_handle.close()
output_handle = open(output +'.fq', "w")
SeqIO.write(qual_sequences, output_handle, "fastq")
output_handle.close()
| mit | Python |
fa2c69bf4399f3a96505fe33050433f275ff6e0b | Bump version to 0.0.3 | vivek8943/twitter-streamer,inactivist/twitter-streamer,bqevin/twitter-streamer,inactivist/twitter-streamer,bqevin/twitter-streamer,vivek8943/twitter-streamer | streamer/__init__.py | streamer/__init__.py | __version__ = "0.0.3"
| __version__ = "0.0.2"
| mit | Python |
2304dcf3ebf189d7c3b1a00211a288e359c4cbb5 | Rename signals for consistency | bow/volt | volt/signals.py | volt/signals.py | """Signals for hooks."""
# Copyright (c) 2012-2022 Wibowo Arindrarto <contact@arindrarto.dev>
# SPDX-License-Identifier: BSD-3-Clause
from typing import Any
import structlog
from blinker import signal, NamedSignal
from structlog.contextvars import bound_contextvars
log = structlog.get_logger(__name__)
post_site_load_engines = signal("post_site_load_engines")
post_site_collect_targets = signal("post_site_collect_targets")
pre_site_write = signal("pre_site_write")
def send(signal: NamedSignal, *args: Any, **kwargs: Any) -> None:
with bound_contextvars(signal=f"{signal.name}"):
log.debug("sending to signal")
rvs = signal.send(*args, **kwargs)
log.debug("sent to signal", num_receiver=len(rvs))
return None
def _clear() -> None:
for s in (
post_site_load_engines,
post_site_collect_targets,
pre_site_write,
):
log.debug("clearing receivers", signal=s.name)
s.receivers.clear()
return None
| """Signals for hooks."""
# Copyright (c) 2012-2022 Wibowo Arindrarto <contact@arindrarto.dev>
# SPDX-License-Identifier: BSD-3-Clause
from typing import Any
import structlog
from blinker import signal, NamedSignal
from structlog.contextvars import bound_contextvars
log = structlog.get_logger(__name__)
post_site_load_engines = signal("post-site-load-engines")
post_site_collect_targets = signal("post-site-collect-targets")
pre_site_write = signal("pre-site-write")
def send(signal: NamedSignal, *args: Any, **kwargs: Any) -> None:
with bound_contextvars(signal=f"{signal.name}"):
log.debug("sending to signal")
rvs = signal.send(*args, **kwargs)
log.debug("sent to signal", num_receiver=len(rvs))
return None
def _clear() -> None:
for s in (
post_site_load_engines,
post_site_collect_targets,
pre_site_write,
):
log.debug("clearing receivers", signal=s.name)
s.receivers.clear()
return None
| bsd-3-clause | Python |
b58dcf4ce81b234de6701468296f4185ed63a8e2 | Add filters to the admin interface | SAlkhairy/trabd,SAlkhairy/trabd,SAlkhairy/trabd,SAlkhairy/trabd | voting/admin.py | voting/admin.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from voting.models import Position, SACYear, Nomination
def make_rejected(ModelAdmin, request, queryset):
queryset.update(is_rejected=True)
make_rejected.short_description = "رفض المرشحـ/ين المختار/ين"
class NominationAdmin(admin.ModelAdmin):
list_filter = ['position', 'is_rejected']
list_display = ['__unicode__', 'cv', 'plan', 'is_rejected']
search_fields = ['position__title', 'user__username',
'user__email', 'user__profile__ar_first_name',
'user__profile__ar_middle_name',
'user__profile__ar_last_name',
'user__profile__en_first_name',
'user__profile__en_middle_name',
'user__profile__en_last_name',
'user__profile__student_id',
'user__profile__mobile_number']
actions = [make_rejected]
class PositionAdmin(admin.ModelAdmin):
list_filter = ['entity', 'year']
admin.site.register(Nomination, NominationAdmin)
admin.site.register(Position, PositionAdmin)
admin.site.register(SACYear)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from voting.models import Position, SACYear, Nomination
def make_rejected(ModelAdmin, request, queryset):
queryset.update(is_rejected=True)
make_rejected.short_description = "رفض المرشحـ/ين المختار/ين"
class NominationAdmin(admin.ModelAdmin):
list_filter = ['position', 'is_rejected']
list_display = ['__unicode__', 'cv', 'plan', 'is_rejected']
search_fields = ['position__title', 'user__username',
'user__email', 'user__profile__ar_first_name',
'user__profile__ar_middle_name',
'user__profile__ar_last_name',
'user__profile__en_first_name',
'user__profile__en_middle_name',
'user__profile__en_last_name',
'user__profile__student_id',
'user__profile__mobile_number']
actions = [make_rejected]
admin.site.register(Nomination, NominationAdmin)
admin.site.register(SACYear)
admin.site.register(Position)
| agpl-3.0 | Python |
847fc43b572384f8afcd395ada275b053e24a193 | Fix aiohttp test | 1st1/uvloop,MagicStack/uvloop,MagicStack/uvloop | tests/test_aiohttp.py | tests/test_aiohttp.py | try:
import aiohttp
import aiohttp.web
except ImportError:
skip_tests = True
else:
skip_tests = False
import asyncio
import unittest
from uvloop import _testbase as tb
class _TestAioHTTP:
def test_aiohttp_basic_1(self):
PAYLOAD = '<h1>It Works!</h1>' * 10000
async def on_request(request):
return aiohttp.web.Response(text=PAYLOAD)
asyncio.set_event_loop(self.loop)
app = aiohttp.web.Application(loop=self.loop)
app.router.add_get('/', on_request)
f = self.loop.create_server(
app.make_handler(),
'0.0.0.0', '0')
srv = self.loop.run_until_complete(f)
port = srv.sockets[0].getsockname()[1]
async def test():
for addr in (('localhost', port),
('127.0.0.1', port)):
async with aiohttp.ClientSession() as client:
async with client.get('http://{}:{}'.format(*addr)) as r:
self.assertEqual(r.status, 200)
result = await r.text()
self.assertEqual(result, PAYLOAD)
self.loop.run_until_complete(test())
self.loop.run_until_complete(app.shutdown())
self.loop.run_until_complete(app.cleanup())
@unittest.skipIf(skip_tests, "no aiohttp module")
class Test_UV_AioHTTP(_TestAioHTTP, tb.UVTestCase):
pass
@unittest.skipIf(skip_tests, "no aiohttp module")
class Test_AIO_AioHTTP(_TestAioHTTP, tb.AIOTestCase):
pass
| try:
import aiohttp
import aiohttp.server
except ImportError:
skip_tests = True
else:
skip_tests = False
import asyncio
import unittest
from uvloop import _testbase as tb
class _TestAioHTTP:
def test_aiohttp_basic_1(self):
PAYLOAD = b'<h1>It Works!</h1>' * 10000
class HttpRequestHandler(aiohttp.server.ServerHttpProtocol):
async def handle_request(self, message, payload):
response = aiohttp.Response(
self.writer, 200, http_version=message.version
)
response.add_header('Content-Type', 'text/html')
response.add_header('Content-Length', str(len(PAYLOAD)))
response.send_headers()
response.write(PAYLOAD)
await response.write_eof()
asyncio.set_event_loop(self.loop)
f = self.loop.create_server(
lambda: HttpRequestHandler(keepalive_timeout=1),
'0.0.0.0', '0')
srv = self.loop.run_until_complete(f)
port = srv.sockets[0].getsockname()[1]
async def test():
for addr in (('localhost', port),
('127.0.0.1', port)):
async with aiohttp.ClientSession() as client:
async with client.get('http://{}:{}'.format(*addr)) as r:
self.assertEqual(r.status, 200)
self.assertEqual(len(await r.text()), len(PAYLOAD))
self.loop.run_until_complete(test())
srv.close()
self.loop.run_until_complete(srv.wait_closed())
@unittest.skipIf(skip_tests, "no aiohttp module")
class Test_UV_AioHTTP(_TestAioHTTP, tb.UVTestCase):
pass
@unittest.skipIf(skip_tests, "no aiohttp module")
class Test_AIO_AioHTTP(_TestAioHTTP, tb.AIOTestCase):
pass
| apache-2.0 | Python |
8b6cbdbae4dedfbbf025a7ecb20c7d7b3959ed11 | support to overwrite position in border | w2srobinho/RbGomoku | rbgomoku/core/player.py | rbgomoku/core/player.py | from core import OverwritePositionException
from core.board import Piece
class AIPlayer:
""" Abstract AI players.
To construct an AI player:
Construct an instance (of its subclass) with the game Board
"""
def __init__(self, board, piece):
self._board = board
self.my_piece = piece
self.opponent = Piece.WHITE if piece == Piece.BLACK else Piece.BLACK
def play(self, row, col):
raise NotImplemented
class HumanPlayer(AIPlayer):
""" Human Player
"""
def __init__(self, board, piece, first=True):
super(HumanPlayer, self).__init__(board, piece)
self.first = not first
def play(self, row, col):
if self._board.get_piece(row, col) != Piece.NONE:
raise OverwritePositionException
return self._board.play_piece(self.my_piece, row, col)
def __repr__(self):
player_number = int(self.first) + 1
return 'Player {}'.format(player_number) | from core.board import Piece
class AIPlayer:
""" Abstract AI players.
To construct an AI player:
Construct an instance (of its subclass) with the game Board
"""
def __init__(self, board, piece):
self._board = board
self.my_piece = piece
self.opponent = Piece.WHITE if piece == Piece.BLACK else Piece.BLACK
# Abstract method to get next move. Return int[2] of {row, col} */
def play(self, row, col):
raise NotImplemented
class HumanPlayer(AIPlayer):
""" Human Player
"""
def __init__(self, board, piece):
super(HumanPlayer, self).__init__(board, piece)
def play(self, row, col):
self._board.play_piece(self.my_piece, row, col)
self._board.has_winner(self.my_piece, row, col)
return self._board.winner
| mit | Python |
2b7d52369206f6a6b9f0ceb4afe28e73e652e806 | Fix typo s/router/route | georgeyk/loafer | loafer/consumer.py | loafer/consumer.py | # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
import asyncio
import json
from functools import partial
import logging
import boto3
import botocore.exceptions
from .conf import settings
from .exceptions import ConsumerError
logger = logging.getLogger(__name__)
class SQSConsumer(object):
def __init__(self, loop=None):
self._loop = loop or asyncio.get_event_loop()
self._semaphore = asyncio.Semaphore(settings.MAX_JOBS)
self._client = boto3.client('sqs')
async def process_message(self, route, message):
logger.info('Message received, handling to route={}'.format(route))
logger.debug('Processing Message={}', message)
# TODO: better heuristic
try:
body = json.loads(message['Body'])
except json.decoder.JSONDecodeError:
body = message['Body']
content = body
if isinstance(body, dict):
if 'Message' in body:
content = body['Message']
# Since we don't know what will happen on message handler, use semaphore
# to protect scheduling or executing too many coroutines/threads
with await self._semaphore:
# TODO: depending on content type, we should pass as *args or **kwargs
logger.info('Message content data type is {!r}'.format(type(content)))
await route.deliver(content)
await self.ack_message(route.queue_url, message['ReceiptHandle'])
async def ack_message(self, queue, receipt):
logger.info('Acking message')
logger.debug('receipt={}'.format(receipt))
fn = partial(self._client.delete_message, QueueUrl=queue, ReceiptHandle=receipt)
# XXX: Refactor this when boto support asyncio
return await self._loop.run_in_executor(None, fn)
async def consume(self, routes):
while True:
for route in routes:
try:
messages = await route.fetch_messages()
except botocore.exceptions.ClientError as exc:
logger.exception(exc)
raise ConsumerError('Error when fetching messages') from exc
for message in messages:
await self.process_message(route, message)
| # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
import asyncio
import json
from functools import partial
import logging
import boto3
import botocore.exceptions
from .conf import settings
from .exceptions import ConsumerError
logger = logging.getLogger(__name__)
class SQSConsumer(object):
def __init__(self, loop=None):
self._loop = loop or asyncio.get_event_loop()
self._semaphore = asyncio.Semaphore(settings.MAX_JOBS)
self._client = boto3.client('sqs')
async def process_message(self, route, message):
logger.info('Message received, handling to route={}'.format(route))
logger.debug('Processing Message={}', message)
# TODO: better heuristic
try:
body = json.loads(message['Body'])
except json.decoder.JSONDecodeError:
body = message['Body']
content = body
if isinstance(body, dict):
if 'Message' in body:
content = body['Message']
# Since we don't know what will happen on message handler, use semaphore
# to protect scheduling or executing too many coroutines/threads
with await self._semaphore:
# TODO: depending on content type, we should pass as *args or **kwargs
logger.info('Message content data type is {!r}'.format(type(content)))
await route.deliver(content)
await self.ack_message(route.queue_url, message['ReceiptHandle'])
async def ack_message(self, queue, receipt):
logger.info('Acking message')
logger.debug('receipt={}'.format(receipt))
fn = partial(self._client.delete_message, QueueUrl=queue, ReceiptHandle=receipt)
# XXX: Refactor this when boto support asyncio
return await self._loop.run_in_executor(None, fn)
async def consume(self, routes):
while True:
for router in routes:
try:
messages = await router.fetch_messages()
except botocore.exceptions.ClientError as exc:
logger.exception(exc)
raise ConsumerError('Error when fetching messages') from exc
for message in messages:
await self.process_message(router, message)
| mit | Python |
cc78aef74876049a4548398133bad64e405351de | Remove redundant parameters from wagtailuserbar tag; trigger a DeprecationWarning if people are still passing a css path | helenwarren/pied-wagtail,serzans/wagtail,benemery/wagtail,helenwarren/pied-wagtail,mephizzle/wagtail,hamsterbacke23/wagtail,FlipperPA/wagtail,bjesus/wagtail,rsalmaso/wagtail,gogobook/wagtail,rjsproxy/wagtail,rv816/wagtail,chimeno/wagtail,iansprice/wagtail,bjesus/wagtail,timorieber/wagtail,stevenewey/wagtail,JoshBarr/wagtail,nutztherookie/wagtail,Toshakins/wagtail,rjsproxy/wagtail,mixxorz/wagtail,JoshBarr/wagtail,kurtrwall/wagtail,jnns/wagtail,willcodefortea/wagtail,mayapurmedia/wagtail,Tivix/wagtail,zerolab/wagtail,dresiu/wagtail,stevenewey/wagtail,nimasmi/wagtail,taedori81/wagtail,mikedingjan/wagtail,hanpama/wagtail,nealtodd/wagtail,nealtodd/wagtail,WQuanfeng/wagtail,mixxorz/wagtail,chrxr/wagtail,KimGlazebrook/wagtail-experiment,torchbox/wagtail,iansprice/wagtail,mikedingjan/wagtail,darith27/wagtail,nrsimha/wagtail,chimeno/wagtail,JoshBarr/wagtail,m-sanders/wagtail,torchbox/wagtail,mixxorz/wagtail,willcodefortea/wagtail,nilnvoid/wagtail,mjec/wagtail,gasman/wagtail,rsalmaso/wagtail,takeshineshiro/wagtail,nilnvoid/wagtail,taedori81/wagtail,kurtw/wagtail,tangentlabs/wagtail,Toshakins/wagtail,lojack/wagtail,marctc/wagtail,inonit/wagtail,hamsterbacke23/wagtail,dresiu/wagtail,thenewguy/wagtail,nealtodd/wagtail,kaedroho/wagtail,WQuanfeng/wagtail,wagtail/wagtail,WQuanfeng/wagtail,nrsimha/wagtail,janusnic/wagtail,Klaudit/wagtail,iho/wagtail,iho/wagtail,mayapurmedia/wagtail,nilnvoid/wagtail,darith27/wagtail,serzans/wagtail,100Shapes/wagtail,hanpama/wagtail,benjaoming/wagtail,thenewguy/wagtail,thenewguy/wagtail,jnns/wagtail,Tivix/wagtail,Pennebaker/wagtail,zerolab/wagtail,mjec/wagtail,FlipperPA/wagtail,Tivix/wagtail,rjsproxy/wagtail,marctc/wagtail,m-sanders/wagtail,rsalmaso/wagtail,chimeno/wagtail,mephizzle/wagtail,jordij/wagtail,Pennebaker/wagtail,torchbox/wagtail,jorge-marques/wagtail,mayapurmedia/wagtail,bjesu
s/wagtail,chrxr/wagtail,lojack/wagtail,timorieber/wagtail,willcodefortea/wagtail,benemery/wagtail,Pennebaker/wagtail,gogobook/wagtail,kurtw/wagtail,benemery/wagtail,nrsimha/wagtail,marctc/wagtail,kaedroho/wagtail,bjesus/wagtail,janusnic/wagtail,hanpama/wagtail,mixxorz/wagtail,Klaudit/wagtail,100Shapes/wagtail,quru/wagtail,kurtrwall/wagtail,davecranwell/wagtail,benjaoming/wagtail,inonit/wagtail,dresiu/wagtail,jorge-marques/wagtail,taedori81/wagtail,benjaoming/wagtail,stevenewey/wagtail,Klaudit/wagtail,helenwarren/pied-wagtail,kaedroho/wagtail,gasman/wagtail,takeshineshiro/wagtail,takeflight/wagtail,jnns/wagtail,rv816/wagtail,jordij/wagtail,kurtrwall/wagtail,chrxr/wagtail,gogobook/wagtail,hamsterbacke23/wagtail,mikedingjan/wagtail,jorge-marques/wagtail,mephizzle/wagtail,iansprice/wagtail,davecranwell/wagtail,nimasmi/wagtail,takeshineshiro/wagtail,mikedingjan/wagtail,m-sanders/wagtail,benemery/wagtail,rv816/wagtail,takeflight/wagtail,nutztherookie/wagtail,gasman/wagtail,marctc/wagtail,gasman/wagtail,KimGlazebrook/wagtail-experiment,nealtodd/wagtail,janusnic/wagtail,kaedroho/wagtail,quru/wagtail,kurtrwall/wagtail,wagtail/wagtail,nilnvoid/wagtail,tangentlabs/wagtail,iho/wagtail,JoshBarr/wagtail,gogobook/wagtail,iansprice/wagtail,willcodefortea/wagtail,nimasmi/wagtail,KimGlazebrook/wagtail-experiment,serzans/wagtail,rv816/wagtail,zerolab/wagtail,quru/wagtail,nimasmi/wagtail,mixxorz/wagtail,darith27/wagtail,jordij/wagtail,rjsproxy/wagtail,mayapurmedia/wagtail,timorieber/wagtail,tangentlabs/wagtail,wagtail/wagtail,takeflight/wagtail,WQuanfeng/wagtail,kaedroho/wagtail,zerolab/wagtail,Toshakins/wagtail,mephizzle/wagtail,serzans/wagtail,Tivix/wagtail,torchbox/wagtail,mjec/wagtail,quru/wagtail,wagtail/wagtail,benjaoming/wagtail,Klaudit/wagtail,taedori81/wagtail,nrsimha/wagtail,janusnic/wagtail,jorge-marques/wagtail,chimeno/wagtail,wagtail/wagtail,kurtw/wagtail,inonit/wagtail,stevenewey/wagtail,100Shapes/wagtail,Pennebaker/wagtail,iho/wagtail,FlipperPA/wagtail,jorge-marques/wagt
ail,FlipperPA/wagtail,davecranwell/wagtail,rsalmaso/wagtail,jordij/wagtail,chrxr/wagtail,hamsterbacke23/wagtail,thenewguy/wagtail,lojack/wagtail,dresiu/wagtail,davecranwell/wagtail,timorieber/wagtail,rsalmaso/wagtail,tangentlabs/wagtail,KimGlazebrook/wagtail-experiment,darith27/wagtail,Toshakins/wagtail,dresiu/wagtail,kurtw/wagtail,nutztherookie/wagtail,jnns/wagtail,thenewguy/wagtail,takeshineshiro/wagtail,mjec/wagtail,hanpama/wagtail,inonit/wagtail,chimeno/wagtail,m-sanders/wagtail,takeflight/wagtail,taedori81/wagtail,nutztherookie/wagtail,gasman/wagtail,zerolab/wagtail | wagtail/wagtailadmin/templatetags/wagtailuserbar.py | wagtail/wagtailadmin/templatetags/wagtailuserbar.py | import warnings
from django import template
from wagtail.wagtailadmin.views import userbar
from wagtail.wagtailcore.models import Page
register = template.Library()
@register.simple_tag(takes_context=True)
def wagtailuserbar(context, css_path=None):
if css_path is not None:
warnings.warn(
"Passing a CSS path to the wagtailuserbar tag is no longer required; use {% wagtailuserbar %} instead",
DeprecationWarning
)
# Find request object
request = context['request']
# Don't render if user doesn't have permission to access the admin area
if not request.user.has_perm('wagtailadmin.access_admin'):
return ''
# Find page object
if 'self' in context and isinstance(context['self'], Page) and context['self'].id is not None:
pass
else:
return ''
# Render edit bird
return userbar.render_edit_frame(request, context) or '' | from django import template
from wagtail.wagtailadmin.views import userbar
from wagtail.wagtailcore.models import Page
register = template.Library()
@register.simple_tag(takes_context=True)
def wagtailuserbar(context, current_page=None, items=None):
# Find request object
request = context['request']
# Don't render if user doesn't have permission to access the admin area
if not request.user.has_perm('wagtailadmin.access_admin'):
return ''
# Find page object
if 'self' in context and isinstance(context['self'], Page) and context['self'].id is not None:
pass
else:
return ''
# Render edit bird
return userbar.render_edit_frame(request, context) or '' | bsd-3-clause | Python |
153f7b28e5b4763dd41f95b4840dcf56d9895393 | Update bot.py | devrand/djcourse | code/bot1/bot.py | code/bot1/bot.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import tweepy, time, sys # pip install tweepy
import sys
sys.path.append("..")
from course_config import *
argfile = str(sys.argv[1])
# go to https://dev.twitter.com/ and register application
# you need CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
api = tweepy.API(auth)
filename=open(argfile,'r')
f=filename.readlines()
filename.close()
for line in f:
api.update_status(line)
time.sleep(30) #Tweet every 15 minutes
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import tweepy, time, sys # pip install tweepy
import sys
sys.path.append("..")
from course_config import *
argfile = str(sys.argv[1])
# need CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
api = tweepy.API(auth)
filename=open(argfile,'r')
f=filename.readlines()
filename.close()
for line in f:
api.update_status(line)
time.sleep(30) #Tweet every 15 minutes
| mit | Python |
9e2bdfece7f5cd9e02b15e9fe11c432e10a12418 | update api tests | richardhsu/naarad,linkedin/naarad,richardhsu/naarad,kilink/naarad,linkedin/naarad,forever342/naarad,linkedin/naarad,linkedin/naarad,kilink/naarad,forever342/naarad,kilink/naarad,kilink/naarad,richardhsu/naarad,forever342/naarad,forever342/naarad,richardhsu/naarad | test/test_naarad_api.py | test/test_naarad_api.py | # coding=utf-8
"""
© 2013 LinkedIn Corp. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");?you may not use this file except in compliance with the License.?You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software?distributed under the License is distributed on an "AS IS" BASIS,?WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
"""
import os
import sys
import time
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'src')))
from naarad import Naarad
import naarad.naarad_constants as CONSTANTS
naarad_obj = None
def setup_module():
  """Nose setup hook: create the shared Naarad instance used by the tests below."""
  global naarad_obj
  naarad_obj = Naarad()
def test_naarad_start_stop():
"""
:return: None
"""
examples_directory = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'examples')
global naarad_obj
test_id_1 = naarad_obj.signal_start(os.path.join(os.path.join(examples_directory, 'conf'),'config-gc'))
time.sleep(60)
naarad_obj.signal_stop(test_id_1)
test_id_2 = naarad_obj.signal_start(os.path.join(os.path.join(examples_directory, 'conf'),'config-gc'))
time.sleep(60)
naarad_obj.signal_stop(test_id_2)
if naarad_obj.analyze(os.path.join(examples_directory,'logs'), 'test_api_temp') != CONSTANTS.OK :
naarad_obj.get_failed_analyses()
naarad_obj.get_sla_data(test_id_1)
naarad_obj.get_stats_data(test_id_1)
naarad_obj.get_sla_data(test_id_2)
naarad_obj.get_stats_data(test_id_2)
if naarad_obj.diff(test_id_1, test_id_2, None) != CONSTANTS.OK:
print 'Error encountered during diff'
if naarad_obj.diff_reports_by_location('test_api_temp/0', 'test_api_temp/1', 'test_api_temp/diff_location', None):
print 'Error encountered during diff'
print 'Please inspect the generated reports manually'
# Allow running this module directly (outside a test runner) as a smoke test.
setup_module()
test_naarad_start_stop()
| # coding=utf-8
"""
© 2013 LinkedIn Corp. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");?you may not use this file except in compliance with the License.?You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software?distributed under the License is distributed on an "AS IS" BASIS,?WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
"""
import os
import sys
import time
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'src')))
from naarad import Naarad
import naarad.naarad_constants as CONSTANTS
naarad_obj = None
def setup_module():
global naarad_obj
naarad_obj = Naarad()
def test_naarad_start_stop():
"""
:return: None
"""
examples_directory = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'examples')
global naarad_obj
test_id_1 = naarad_obj.signal_start(os.path.join(os.path.join(examples_directory, 'conf'),'config-gc'))
time.sleep(60)
naarad_obj.signal_stop(test_id_1)
test_id_2 = naarad_obj.signal_start(os.path.join(os.path.join(examples_directory, 'conf'),'config-gc'))
time.sleep(60)
naarad_obj.signal_stop(test_id_2)
if naarad_obj.analyze(os.path.join(examples_directory,'logs'), 'test_api_temp') != CONSTANTS.OK :
naarad_obj.get_failed_analyses()
naarad_obj.get_sla_data(test_id_1)
naarad_obj.get_stats_data(test_id_1)
naarad_obj.get_sla_data(test_id_2)
naarad_obj.get_stats_data(test_id_2)
if naarad_obj.diff(test_id_1, test_id_2, None) != CONSTANTS.OK:
print 'Error encountered during diff'
print 'Please inspect the generated reports manually'
setup_module()
test_naarad_start_stop()
| apache-2.0 | Python |
ee1532cc226987904666eeb0bda61445455d04e3 | Increase test timeout | panda73111/aiohttp,KeepSafe/aiohttp,singulared/aiohttp,rutsky/aiohttp,arthurdarcet/aiohttp,alex-eri/aiohttp-1,panda73111/aiohttp,z2v/aiohttp,arthurdarcet/aiohttp,moden-py/aiohttp,alex-eri/aiohttp-1,panda73111/aiohttp,moden-py/aiohttp,moden-py/aiohttp,pfreixes/aiohttp,hellysmile/aiohttp,arthurdarcet/aiohttp,juliatem/aiohttp,singulared/aiohttp,alex-eri/aiohttp-1,KeepSafe/aiohttp,AraHaanOrg/aiohttp,esaezgil/aiohttp,playpauseandstop/aiohttp,hellysmile/aiohttp,esaezgil/aiohttp,jettify/aiohttp,z2v/aiohttp,esaezgil/aiohttp,Eyepea/aiohttp,rutsky/aiohttp,juliatem/aiohttp,rutsky/aiohttp,pfreixes/aiohttp,singulared/aiohttp,KeepSafe/aiohttp,AraHaanOrg/aiohttp,jettify/aiohttp,jettify/aiohttp,z2v/aiohttp | tests/test_run_app.py | tests/test_run_app.py | import ssl
from unittest import mock
from aiohttp import web
def test_run_app_http(loop, mocker):
    """run_app with defaults binds 0.0.0.0:8080 without SSL and runs app startup."""
    mocker.spy(loop, 'create_server')
    # Schedule a stop so run_app's blocking run_forever returns shortly.
    loop.call_later(0.05, loop.stop)
    app = web.Application(loop=loop)
    mocker.spy(app, 'startup')
    web.run_app(app, print=lambda *args: None)
    assert loop.is_closed()
    loop.create_server.assert_called_with(mock.ANY, '0.0.0.0', 8080,
                                          ssl=None, backlog=128)
    app.startup.assert_called_once_with()
def test_run_app_https(loop, mocker):
    """run_app with an ssl_context binds 0.0.0.0:8443 and passes the context on."""
    mocker.spy(loop, 'create_server')
    # Schedule a stop so run_app's blocking run_forever returns shortly.
    loop.call_later(0.05, loop.stop)
    app = web.Application(loop=loop)
    mocker.spy(app, 'startup')
    ssl_context = ssl.create_default_context()
    web.run_app(app, ssl_context=ssl_context, print=lambda *args: None)
    assert loop.is_closed()
    loop.create_server.assert_called_with(mock.ANY, '0.0.0.0', 8443,
                                          ssl=ssl_context, backlog=128)
    app.startup.assert_called_once_with()
def test_run_app_nondefault_host_port(loop, unused_port, mocker):
    """Explicit host/port arguments are forwarded verbatim to create_server."""
    port = unused_port()
    host = 'localhost'
    mocker.spy(loop, 'create_server')
    # Schedule a stop so run_app's blocking run_forever returns shortly.
    loop.call_later(0.05, loop.stop)
    app = web.Application(loop=loop)
    mocker.spy(app, 'startup')
    web.run_app(app, host=host, port=port, print=lambda *args: None)
    assert loop.is_closed()
    loop.create_server.assert_called_with(mock.ANY, host, port,
                                          ssl=None, backlog=128)
    app.startup.assert_called_once_with()
def test_run_app_custom_backlog(loop, mocker):
    """A custom backlog value is forwarded to create_server unchanged."""
    mocker.spy(loop, 'create_server')
    # Schedule a stop so run_app's blocking run_forever returns shortly.
    loop.call_later(0.05, loop.stop)
    app = web.Application(loop=loop)
    mocker.spy(app, 'startup')
    web.run_app(app, backlog=10, print=lambda *args: None)
    assert loop.is_closed()
    loop.create_server.assert_called_with(mock.ANY, '0.0.0.0', 8080,
                                          ssl=None, backlog=10)
    app.startup.assert_called_once_with()
| import ssl
from unittest import mock
from aiohttp import web
def test_run_app_http(loop, mocker):
mocker.spy(loop, 'create_server')
loop.call_later(0.02, loop.stop)
app = web.Application(loop=loop)
mocker.spy(app, 'startup')
web.run_app(app, print=lambda *args: None)
assert loop.is_closed()
loop.create_server.assert_called_with(mock.ANY, '0.0.0.0', 8080,
ssl=None, backlog=128)
app.startup.assert_called_once_with()
def test_run_app_https(loop, mocker):
mocker.spy(loop, 'create_server')
loop.call_later(0.02, loop.stop)
app = web.Application(loop=loop)
mocker.spy(app, 'startup')
ssl_context = ssl.create_default_context()
web.run_app(app, ssl_context=ssl_context, print=lambda *args: None)
assert loop.is_closed()
loop.create_server.assert_called_with(mock.ANY, '0.0.0.0', 8443,
ssl=ssl_context, backlog=128)
app.startup.assert_called_once_with()
def test_run_app_nondefault_host_port(loop, unused_port, mocker):
port = unused_port()
host = 'localhost'
mocker.spy(loop, 'create_server')
loop.call_later(0.02, loop.stop)
app = web.Application(loop=loop)
mocker.spy(app, 'startup')
web.run_app(app, host=host, port=port, print=lambda *args: None)
assert loop.is_closed()
loop.create_server.assert_called_with(mock.ANY, host, port,
ssl=None, backlog=128)
app.startup.assert_called_once_with()
def test_run_app_custom_backlog(loop, mocker):
mocker.spy(loop, 'create_server')
loop.call_later(0.02, loop.stop)
app = web.Application(loop=loop)
mocker.spy(app, 'startup')
web.run_app(app, backlog=10, print=lambda *args: None)
assert loop.is_closed()
loop.create_server.assert_called_with(mock.ANY, '0.0.0.0', 8080,
ssl=None, backlog=10)
app.startup.assert_called_once_with()
| apache-2.0 | Python |
d8b322439a5fdaf31ec52dc7c2a2ff9e18c12316 | solve import error on install magpie | Ouranosinc/Magpie,Ouranosinc/Magpie,Ouranosinc/Magpie | magpie/__init__.py | magpie/__init__.py | # -*- coding: utf-8 -*-
import logging
import sys
LOGGER = logging.getLogger(__name__)
def includeme(config):
    """Pyramid inclusion hook: put magpie's module dir on sys.path and
    register all third-party and magpie sub-components on the configurator."""
    # import needs to be here, otherwise ImportError happens during setup.py install (modules not yet installed)
    from magpie import constants
    LOGGER.info("Adding MAGPIE_MODULE_DIR='{}' to path.".format(constants.MAGPIE_MODULE_DIR))
    sys.path.insert(0, constants.MAGPIE_MODULE_DIR)
    # include magpie components (all the file which define includeme)
    config.include('cornice')
    config.include('cornice_swagger')
    config.include('pyramid_chameleon')
    config.include('pyramid_mako')
    config.include('magpie.definitions')
    config.include('magpie.api')
    config.include('magpie.db')
    config.include('magpie.ui')
| # -*- coding: utf-8 -*-
from magpie import constants
import logging
import sys
LOGGER = logging.getLogger(__name__)
def includeme(config):
LOGGER.info("Adding MAGPIE_MODULE_DIR='{}' to path.".format(constants.MAGPIE_MODULE_DIR))
sys.path.insert(0, constants.MAGPIE_MODULE_DIR)
# include magpie components (all the file which define includeme)
config.include('cornice')
config.include('cornice_swagger')
config.include('pyramid_chameleon')
config.include('pyramid_mako')
config.include('magpie.definitions')
config.include('magpie.api')
config.include('magpie.db')
config.include('magpie.ui')
| apache-2.0 | Python |
5dfcd4ea8633a6bc658cccd654fce2cc7c217269 | Add helpful message to end of installer. | tarmstrong/nbdiff,tarmstrong/nbdiff,tarmstrong/nbdiff,tarmstrong/nbdiff | nbdiff/install.py | nbdiff/install.py | from __future__ import print_function
from . import __path__ as NBDIFF_PATH
import subprocess
import re
import os
import shutil
import sys
def install():
    """Create an IPython profile named 'nbdiff' and copy the notebook
    extension's static files into it. Exits with -1 if the profile path
    cannot be determined from IPython's output."""
    profile_name = 'nbdiff'
    create_cmd = ['ipython', 'profile', 'create', profile_name]
    message = subprocess.Popen(create_cmd, stderr=subprocess.PIPE)
    message_str = message.stderr.read()
    # IPython announces the new profile on stderr ("[ProfileCreate] ...");
    # capture the directory portion of the ipython_config.py path it prints.
    re_msgline = \
        re.compile(r'^.ProfileCre.*u\'(?P<profilepath>.*)ipython_config\.py.$')
    profile_paths = [
        re_msgline.match(line).groups()[0]
        for line in message_str.splitlines()
        if re_msgline.match(line)
    ]
    if len(profile_paths) == 0:
        # No match usually means nothing was created (profile already exists).
        sys.stderr.write(
            "It looks like creating the ipython profile "
            "didn't work. Maybe you've already installed it?\n"
        )
        sys.exit(-1)
    profile_path = profile_paths[0]
    extension_copy_from = os.path.join(NBDIFF_PATH[0], 'extension/static')
    extension_copy_dest = os.path.join(profile_path, 'static')
    shutil.copytree(extension_copy_from, extension_copy_dest)
    print("Finished installing NBDiff extension in profile `nbdiff`.")
| from . import __path__ as NBDIFF_PATH
import subprocess
import re
import os
import shutil
import sys
def install():
profile_name = 'nbdiff'
create_cmd = ['ipython', 'profile', 'create', profile_name]
message = subprocess.Popen(create_cmd, stderr=subprocess.PIPE)
message_str = message.stderr.read()
re_msgline = \
re.compile(r'^.ProfileCre.*u\'(?P<profilepath>.*)ipython_config\.py.$')
profile_paths = [
re_msgline.match(line).groups()[0]
for line in message_str.splitlines()
if re_msgline.match(line)
]
if len(profile_paths) == 0:
sys.stderr.write("It looks like creating the ipython profile "
"didn't work. Maybe you've already installed it?\n")
sys.exit(-1)
profile_path = profile_paths[0]
extension_copy_from = os.path.join(NBDIFF_PATH[0], 'extension/static')
extension_copy_dest = os.path.join(profile_path, 'static')
print extension_copy_from
print extension_copy_dest
shutil.copytree(extension_copy_from, extension_copy_dest)
print profile_path
| mit | Python |
390fa07c191d79290b1ef83c268f38431f68093a | Fix import in test client. | riga/jsonrpyc | tests/clients/simple.py | tests/clients/simple.py | # -*- coding: utf-8 -*-
import os
import sys
base = os.path.dirname(os.path.abspath(__file__))
sys.path.append(base)
from base import jsonrpyc
class MyClass(object):
    """Example service object whose methods are exposed over JSON-RPC."""

    def one(self):
        """Return the constant integer 1."""
        return 1

    def twice(self, n):
        """Return *n* doubled."""
        return 2 * n

    def arglen(self, *args, **kwargs):
        """Return how many positional plus keyword arguments were passed."""
        total = len(args)
        total += len(kwargs)
        return total
if __name__ == "__main__":
    # Serve MyClass over stdin/stdout via the JSON-RPC helper.
    rpc = jsonrpyc.RPC(MyClass())
| # -*- coding: utf-8 -*-
from base import jsonrpyc
class MyClass(object):
def one(self):
return 1
def twice(self, n):
return n * 2
def arglen(self, *args, **kwargs):
return len(args) + len(kwargs)
if __name__ == "__main__":
rpc = jsonrpyc.RPC(MyClass())
| mit | Python |
7c6754a439f8fa1c7ebe5c12b9c51651c02c35c4 | 修改post参数,添加全局editor配置 | SilverBlogTeam/SilverBlog,SilverBlogTeam/SilverBlog | manage/new_post.py | manage/new_post.py | import datetime
import json
import os.path
import re
import shutil
from pypinyin import lazy_pinyin
from common import file
from manage import get_excerpt
def get_name(nameinput):
    """Derive a URL slug from a title: strip punctuation/whitespace, then
    join the pinyin transliteration of the remaining text with dashes."""
    name_raw = re.sub("[\s+\.\!\/_,$%^*(+\"\']+|[+——!,。?、~@#¥%……&*()]+", "", nameinput)
    namelist = lazy_pinyin(name_raw)
    # str.join replaces the old manual concat loop and its trailing
    # name[1:len(name)] slice used to drop the leading dash.
    return "-".join(namelist)
def new_post(name, title, filename, editor):
    """Create a post: place its markdown source in ./document/ and prepend
    its metadata (name, title, excerpt, date) to ./config/page.json.

    name     -- URL slug; if empty it is derived from the title via pinyin.
    title    -- article title.
    filename -- existing markdown file to copy; if missing, the editor is used.
    editor   -- editor command to launch, or None to skip editing.
    """
    if len(name) == 0:
        name = get_name(title)
    if os.path.isfile(filename):
        shutil.copyfile(filename, "./document/{0}.md".format(name))
    else:
        if editor is not None:
            # No source file to copy: open (and implicitly create) the
            # document in the configured editor.
            os.system("{0} ./document/{1}.md".format(editor, name))
    excerpt = get_excerpt.get_excerpt("./document/{0}.md".format(name))
    post_info = {"name": name, "title": title, "excerpt": excerpt, "time": str(datetime.date.today())}
    if os.path.isfile("./config/page.json"):
        page_list = json.loads(file.read_file("./config/page.json"))
    else:
        page_list = list()
    # Newest post goes first in the index.
    page_list.insert(0, post_info)
    file.write_file("./config/page.json", json.dumps(page_list, ensure_ascii=False))
def new_post_init(config_file=None, editor="None"):
    """Gather post parameters (from a JSON config file, or interactively)
    and create the post.

    NOTE(review): ``editor`` uses the *string* "None" as a sentinel default;
    in that case the editor is looked up in config/system.json's "Editor"
    key, and ``None`` (the object) ends up meaning "do not launch an editor".
    """
    if config_file is not None and os.path.exists(config_file):
        config = json.loads(file.read_file(config_file))
        title = config["title"]
        name = config["name"]
        filename = config["file"]
    else:
        title = input("Please enter the title of the article:")
        name = input("Please enter the URL (Leave a blank use pinyin):")
        filename = input("Please enter the file path to copy (blank or Non-existent will be new):")
    if editor=="None":
        # No explicit editor given: fall back to the global configuration.
        system_info = json.loads(file.read_file("./config/system.json"))
        if "Editor" in system_info:
            editor=system_info["Editor"]
        else:
            editor=None
    new_post(name, title, filename, editor)
    print("Success!")
| import datetime
import json
import os.path
import re
import shutil
from pypinyin import lazy_pinyin
from common import file
from manage import get_excerpt
def get_name(nameinput):
name_raw = re.sub("[\s+\.\!\/_,$%^*(+\"\']+|[+——!,。?、~@#¥%……&*()]+", "", nameinput)
namelist = lazy_pinyin(name_raw)
name = ""
for item in namelist:
name = name + "-" + item
return name[1:len(name)]
def new_post(name, title, filename, editor):
if len(name) == 0:
name = get_name(title)
if os.path.isfile(filename):
shutil.copyfile(filename, "./document/{0}.md".format(name))
else:
if editor is not None:
os.system("{0} ./document/{1}.md".format(editor, name))
excerpt = get_excerpt.get_excerpt("./document/{0}.md".format(name))
post_info = {"name": name, "title": title, "excerpt": excerpt, "time": str(datetime.date.today())}
if os.path.isfile("./config/page.json"):
page_list = json.loads(file.read_file("./config/page.json"))
else:
page_list = list()
page_list.insert(0, post_info)
file.write_file("./config/page.json", json.dumps(page_list, ensure_ascii=False))
def new_post_init(config_file=None, editor="vim"):
if config_file is not None and os.path.exists(config_file):
config = json.loads(file.read_file(config_file))
title = config["title"]
name = config["name"]
filename = config["file"]
else:
title = input("Please enter the title of the article:")
name = input("Please enter the URL (Leave a blank use pinyin):")
filename = input("Please enter the file path to copy (blank or Non-existent will be new):")
new_post(name, title, filename, editor)
print("Success!")
| bsd-3-clause | Python |
7bd2bfa8deb59c97f7630ed10fe70fd7e8bd8587 | Update dependency bazelbuild/bazel to latest version | google/copybara,google/copybara,google/copybara | third_party/bazel.bzl | third_party/bazel.bzl | # Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is autogenerated by copybara, please do not edit.
bazel_version = "50ce3f973cbc96a0326560a31b736a4f0ca8dc62"
bazel_sha256 = "977e63bacdec2cc29192ed52ea251915d4eda12c0cc666b4e71aade947404442"
| # Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is autogenerated by copybara, please do not edit.
bazel_version = "d9ebac9c2bcca821902b86cdb5f1297790bba2f9"
bazel_sha256 = "f648383e43e4172a6787dcde60365091ff4dbced0485bbf9c4b515d5f2c96139"
| apache-2.0 | Python |
73d0be7a432340b4ecd140ad1cc8792d3f049779 | Use SelfAttribute instead of explicit lambda | agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft | tests/factories/user.py | tests/factories/user.py | # -*- coding: utf-8 -*-
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import factory
from factory.faker import Faker
from pycroft.model.user import User
from .base import BaseFactory
from .facilities import RoomFactory
from .finance import AccountFactory
class UserFactory(BaseFactory):
    """Factory producing ``User`` model instances with faked personal data."""
    class Meta:
        model = User
    login = Faker('user_name')
    name = Faker('name')
    registered_at = Faker('date_time')
    password = Faker('password')
    email = Faker('email')
    account = factory.SubFactory(AccountFactory, type="USER_ASSET")
    room = factory.SubFactory(RoomFactory)
    # Reuse the generated room's address so user and room stay consistent.
    address = factory.SelfAttribute('room.address')
class UserWithHostFactory(UserFactory):
    """UserFactory variant that additionally creates a Host owned by the user."""
    host = factory.RelatedFactory('tests.factories.host.HostFactory', 'owner')
| # -*- coding: utf-8 -*-
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import factory
from factory.faker import Faker
from pycroft.model.user import User
from .base import BaseFactory
from .facilities import RoomFactory
from .finance import AccountFactory
class UserFactory(BaseFactory):
class Meta:
model = User
login = Faker('user_name')
name = Faker('name')
registered_at = Faker('date_time')
password = Faker('password')
email = Faker('email')
account = factory.SubFactory(AccountFactory, type="USER_ASSET")
room = factory.SubFactory(RoomFactory)
address = factory.LazyAttribute(lambda o: o.room.address)
class UserWithHostFactory(UserFactory):
host = factory.RelatedFactory('tests.factories.host.HostFactory', 'owner')
| apache-2.0 | Python |
5c1a404353a0cdcd49610a21d7d19b79898ac7e3 | make mpi example a little more verbose | JensTimmerman/radical.pilot,JensTimmerman/radical.pilot,JensTimmerman/radical.pilot,JensTimmerman/radical.pilot | tests/helloworld_mpi.py | tests/helloworld_mpi.py | #!/usr/bin/env python
# This is an example MPI4Py program that is used
# by different examples and tests.
import sys
import time
import traceback
from mpi4py import MPI
try :
print "start"
SLEEP = 10
name = MPI.Get_processor_name()
comm = MPI.COMM_WORLD
print "mpi rank %d/%d/%s" % (comm.rank, comm.size, name)
time.sleep(SLEEP)
comm.Barrier() # wait for everybody to synchronize here
except Exception as e :
traceback.print_exc ()
print "error : %s" % s
sys.exit (1)
finally :
print "done"
sys.exit (0)
| #!/usr/bin/env python
# This is an example MPI4Py program that is used
# by different examples and tests.
from mpi4py import MPI
import time
SLEEP = 10
name = MPI.Get_processor_name()
comm = MPI.COMM_WORLD
print "mpi rank %d/%d/%s" % (comm.rank, comm.size, name)
time.sleep(SLEEP)
comm.Barrier() # wait for everybody to synchronize here
| mit | Python |
3bb6017897f9b8c859c2d3879c2e9d51b899f57c | Increase number of iterations for xor neural net | WesleyAC/toybox,WesleyAC/toybox,WesleyAC/toybox,WesleyAC/toybox,WesleyAC/toybox | neuralnets/xor.py | neuralnets/xor.py | import numpy as np
from net import NeuralNet
net = NeuralNet(2, 1, 3, 1, 342047)
output_dot = True
inputs = np.array([[1,1],
[0,0],
[1,0],
[0,1]])
outputs = np.array([[0],
[0],
[1],
[1]])
for i in xrange(80000):
if i % 100 == 0 and output_dot:
open("/tmp/xor{:05d}graph".format(i), mode="w").write(net.output_dot((inputs,outputs)))
net.learn(inputs, outputs, 0.05)
print("trained")
print("error: {}".format(net.error(inputs, outputs)))
for inpt in inputs:
print(net.forward(inpt))
| import numpy as np
from net import NeuralNet
net = NeuralNet(2, 1, 3, 1, 342047)
output_dot = True
inputs = np.array([[1,1],
[0,0],
[1,0],
[0,1]])
outputs = np.array([[0],
[0],
[1],
[1]])
for i in xrange(50000):
if i % 100 == 0 and output_dot:
open("/tmp/xor{:05d}graph".format(i), mode="w").write(net.output_dot((inputs,outputs)))
net.learn(inputs, outputs, 0.05)
print("trained")
print("error: {}".format(net.error(inputs, outputs)))
for inpt in inputs:
print(net.forward(inpt))
| mit | Python |
2c37ed091baf12e53885bfa06fdb835bb8de1218 | Add Bitbucket to skipif marker reason | pjbull/cookiecutter,atlassian/cookiecutter,sp1rs/cookiecutter,0k/cookiecutter,cichm/cookiecutter,takeflight/cookiecutter,atlassian/cookiecutter,kkujawinski/cookiecutter,foodszhang/cookiecutter,christabor/cookiecutter,ramiroluz/cookiecutter,cguardia/cookiecutter,tylerdave/cookiecutter,benthomasson/cookiecutter,Springerle/cookiecutter,nhomar/cookiecutter,vincentbernat/cookiecutter,stevepiercy/cookiecutter,kkujawinski/cookiecutter,jhermann/cookiecutter,luzfcb/cookiecutter,drgarcia1986/cookiecutter,ramiroluz/cookiecutter,willingc/cookiecutter,audreyr/cookiecutter,terryjbates/cookiecutter,dajose/cookiecutter,christabor/cookiecutter,ionelmc/cookiecutter,nhomar/cookiecutter,foodszhang/cookiecutter,venumech/cookiecutter,tylerdave/cookiecutter,luzfcb/cookiecutter,hackebrot/cookiecutter,michaeljoseph/cookiecutter,lgp171188/cookiecutter,0k/cookiecutter,janusnic/cookiecutter,lgp171188/cookiecutter,vintasoftware/cookiecutter,ionelmc/cookiecutter,Springerle/cookiecutter,cichm/cookiecutter,pjbull/cookiecutter,agconti/cookiecutter,jhermann/cookiecutter,Vauxoo/cookiecutter,agconti/cookiecutter,willingc/cookiecutter,lucius-feng/cookiecutter,hackebrot/cookiecutter,moi65/cookiecutter,moi65/cookiecutter,Vauxoo/cookiecutter,stevepiercy/cookiecutter,dajose/cookiecutter,audreyr/cookiecutter,lucius-feng/cookiecutter,venumech/cookiecutter,takeflight/cookiecutter,drgarcia1986/cookiecutter,terryjbates/cookiecutter,janusnic/cookiecutter,sp1rs/cookiecutter,vincentbernat/cookiecutter,cguardia/cookiecutter,benthomasson/cookiecutter,michaeljoseph/cookiecutter,vintasoftware/cookiecutter | tests/skipif_markers.py | tests/skipif_markers.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
skipif_markers
--------------
Contains pytest skipif markers to be used in the suite.
"""
import pytest
import os
# A flag counts as "set" when the variable exists in the environment at all,
# regardless of its value -- identical to the original try/except KeyError
# form, but expressed with a direct membership test.
travis = u'TRAVIS' in os.environ

no_network = u'DISABLE_NETWORK_TESTS' in os.environ
# For some reason pytest incorrectly uses the first reason text regardless of
# which condition matches. Using a unified message for now
# travis_reason = 'Works locally with tox but fails on Travis.'
# no_network_reason = 'Needs a network connection to GitHub.'
reason = (
    'Fails on Travis or else there is no network connection to '
    'GitHub/Bitbucket.'
)
# Decorators for tests that must be skipped on Travis / without network access.
skipif_travis = pytest.mark.skipif(travis, reason=reason)
skipif_no_network = pytest.mark.skipif(no_network, reason=reason)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
skipif_markers
--------------
Contains pytest skipif markers to be used in the suite.
"""
import pytest
import os
try:
os.environ[u'TRAVIS']
except KeyError:
travis = False
else:
travis = True
try:
os.environ[u'DISABLE_NETWORK_TESTS']
except KeyError:
no_network = False
else:
no_network = True
# For some reason pytest incorrectly uses the first reason text regardless of
# which condition matches. Using a unified message for now
# travis_reason = 'Works locally with tox but fails on Travis.'
# no_network_reason = 'Needs a network connection to GitHub.'
reason = 'Fails on Travis or else there is no network connection to GitHub'
skipif_travis = pytest.mark.skipif(travis, reason=reason)
skipif_no_network = pytest.mark.skipif(no_network, reason=reason)
| bsd-3-clause | Python |
44dac786339716ad8cc05f6790b73b5fc47be812 | Remove extra comma to avoid flake8 test failure in CircleCI | yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core | config/jinja2.py | config/jinja2.py | from django.urls import reverse
from django.utils import translation
from django.template.backends.jinja2 import Jinja2
from jinja2 import Environment
class FoodsavingJinja2(Jinja2):
    """Jinja2 template backend that looks up templates in app 'templates' dirs."""
    app_dirname = 'templates'
def environment(**options):
    """Build the Jinja2 environment: i18n extension, Django URL reversing as
    a global, and Django's translation machinery for {% trans %} blocks."""
    env = Environment(extensions=['jinja2.ext.i18n'], **options)
    env.globals.update({
        'url': reverse,
    })
    env.install_gettext_translations(translation)
    # Fall back to null translations when no catalog is active.
    env.install_null_translations()
    return env
| from django.urls import reverse
from django.utils import translation
from django.template.backends.jinja2 import Jinja2
from jinja2 import Environment
class FoodsavingJinja2(Jinja2):
app_dirname = 'templates'
def environment(**options):
env = Environment(extensions=['jinja2.ext.i18n',], **options)
env.globals.update({
'url': reverse,
})
env.install_gettext_translations(translation)
env.install_null_translations()
return env
| agpl-3.0 | Python |
b447fa44ca1dd2e9d21af4ce61ee6092fe3c94ec | Update test_cmatrices to new interface | Radiomics/pyradiomics,Radiomics/pyradiomics,Radiomics/pyradiomics,Radiomics/pyradiomics | tests/test_cmatrices.py | tests/test_cmatrices.py | # to run this test, from directory above:
# setenv PYTHONPATH /path/to/pyradiomics/radiomics
# nosetests --nocapture -v tests/test_features.py
import logging
from nose_parameterized import parameterized
import numpy
import six
from radiomics import cMatsEnabled, getFeatureClasses
from testUtils import custom_name_func, RadiomicsTestUtils
testUtils = RadiomicsTestUtils()
testCases = ('brain1', 'brain2', 'breast1', 'lung1', 'lung2')
featureClasses = getFeatureClasses()
class TestFeatures:
  """Check that each feature class's C-extension matrix calculation matches
  the pure-python implementation on every test case."""
  def generate_scenarios():
    # Called (without self) at class-definition time by parameterized.expand
    # below; yields one (testCase, featureClassName) pair per combination.
    global testCases, featureClasses
    for testCase in testCases:
      for className, featureClass in six.iteritems(featureClasses):
        assert(featureClass is not None)
        # Only classes with a C matrix implementation (or shape) are relevant.
        if "_calculateCMatrix" in dir(featureClass) or className == "shape":
          logging.debug('generate_scenarios: featureClass = %s', className)
          yield testCase, className
  global testUtils
  @parameterized.expand(generate_scenarios(), testcase_func_name=custom_name_func)
  def test_scenario(self, test, featureClassName):
    """Compare C and python results for one (test case, feature class) pair."""
    print("")
    global testUtils, featureClasses
    logging.debug('test_scenario: testCase = %s, featureClassName = %s', test, featureClassName)
    assert cMatsEnabled()
    testUtils.setFeatureClassAndTestCase(featureClassName, test)
    testImage = testUtils.getImage('original')
    testMask = testUtils.getMask('original')
    featureClass = featureClasses[featureClassName](testImage, testMask, **testUtils.getSettings())
    if featureClassName == 'shape':
      # Shape has no matrix; compare the surface area values instead.
      cSA = getattr(featureClass, 'SurfaceArea')  # pre-calculated value by C extension
      assert (cSA is not None)
      pySA = getattr(featureClass, '_calculateSurfaceArea')()  # Function, call to calculate SA in full-python mode
      assert (pySA is not None)
      # Check if the calculated values match
      assert (numpy.abs(pySA - cSA)) < 1e-3
    else:
      assert "_calculateMatrix" in dir(featureClass)
      cMat = featureClass._calculateCMatrix()
      assert cMat is not None
      pyMat = featureClass._calculateMatrix()
      assert pyMat is not None
      # Check if the calculated arrays match
      assert numpy.max(numpy.abs(pyMat - cMat)) < 1e-3
| # to run this test, from directory above:
# setenv PYTHONPATH /path/to/pyradiomics/radiomics
# nosetests --nocapture -v tests/test_features.py
import logging
from nose_parameterized import parameterized
import numpy
import six
from radiomics import cMatsEnabled, getFeatureClasses
from testUtils import custom_name_func, RadiomicsTestUtils
testUtils = RadiomicsTestUtils()
testCases = ('brain1', 'brain2', 'breast1', 'lung1', 'lung2')
featureClasses = getFeatureClasses()
class TestFeatures:
def generate_scenarios():
global testCases, featureClasses
for testCase in testCases:
for className, featureClass in six.iteritems(featureClasses):
assert(featureClass is not None)
if "_calculateCMatrix" in dir(featureClass) or className == "shape":
logging.debug('generate_scenarios: featureClass = %s', className)
yield testCase, className
global testUtils
@parameterized.expand(generate_scenarios(), testcase_func_name=custom_name_func)
def test_scenario(self, testCase, featureClassName):
print("")
global testUtils, featureClasses
logging.debug('test_scenario: testCase = %s, featureClassName = %s', testCase, featureClassName)
assert cMatsEnabled()
testUtils.setFeatureClassAndTestCase(featureClassName, testCase)
testImage = testUtils.getImage()
testMask = testUtils.getMask()
featureClass = featureClasses[featureClassName](testImage, testMask, **testUtils.getSettings())
if featureClassName == 'shape':
cSA = getattr(featureClass, 'SurfaceArea') # pre-calculated value by C extension
assert (cSA is not None)
pySA = getattr(featureClass, '_calculateSurfaceArea')() # Function, call to calculate SA in full-python mode
assert (pySA is not None)
# Check if the calculated values match
assert (numpy.abs(pySA - cSA)) < 1e-3
else:
assert "_calculateMatrix" in dir(featureClass)
cMat = featureClass._calculateCMatrix()
assert cMat is not None
pyMat = featureClass._calculateMatrix()
assert pyMat is not None
# Check if the calculated arrays match
assert numpy.max(numpy.abs(pyMat - cMat)) < 1e-3
| bsd-3-clause | Python |
3b408ed7702100b7f1755f819e05bb61b1740957 | add medialab events search- left todo: json and date | OSWeekends/EventPoints,OSWeekends/EventPoints | media_lab_prado.py | media_lab_prado.py | # http://medialab-prado.es/events/2016-12-01
# -*- coding: utf-8 -*-
# Scrape the Medialab-Prado events page for one day and print each event's
# location and description.
from bs4 import BeautifulSoup
import urllib.request
import datetime

# TODO: derive the date from datetime instead of hard-coding a day.
date = "2017-01-02"
url = "http://medialab-prado.es/events/" + date
request = urllib.request.urlopen(url)

if request.getcode() == 200:
    request = request.read()
    soup = BeautifulSoup(request, "html.parser")
    # Each event entry links its detail page behind a "Seguir leyendo…" anchor.
    pageevents = soup.find("ul", { "class" : "lista"}).findChildren("a")
    for event in pageevents:
        if event.text == "Seguir leyendo…":
            event_url = event['href']
            request2 = urllib.request.urlopen(event_url)
            if request2.getcode() == 200:
                request2 = request2.read()
                soup = BeautifulSoup(request2, "html.parser")
                location = soup.find("div", { "class" : "lugar"})
                # Fix: identity comparison with None (was `location == None`).
                if location is None:
                    # No venue div: the event takes place at MediaLab itself.
                    location = "MediaLab"
                else:
                    location = location.find("p")
                print(location)
                description = soup.find("div", { "class" : "entradilla"})
                print(description.text)
| # http://medialab-prado.es/events/2016-12-01 | mit | Python |
769e6209db066b8b5908426850fd300fd29098e8 | Fix codemirror mode and language name | ryanpepper/tcl_kernel | tcl_kernel/kernel.py | tcl_kernel/kernel.py | from ipykernel.kernelbase import Kernel
try:
import Tkinter
except ImportError:
import tkinter as Tkinter
__version__ = '0.0.1'
class TclKernel(Kernel):
    """Jupyter kernel that evaluates each cell with a Tcl interpreter
    obtained through Tkinter."""
    implementation = 'tcl_kernel'
    implementation_version = __version__
    # 'codemirror_mode' drives syntax highlighting in the notebook frontend.
    language_info = {'name': 'Tcl',
                     'codemirror_mode': 'Tcl',
                     'mimetype': 'text/x-script.tcl',
                     'file_extension': '.tcl'}
    banner = "Tcl Kernel"
    def __init__(self, **kwargs):
        Kernel.__init__(self, **kwargs)
        self.tcl = Tkinter.Tcl()
        self.execution_count = 0
        # Redefine Tcl's puts so single-argument calls *return* "=> <text>"
        # (captured below) instead of writing to the process stdout.
        putsredef = 'rename puts original_puts \nproc puts {args} {\n if {[llength $args] == 1} {\n return "=> [lindex $args 0]"\n } else {\n eval original_puts $args\n }\n}\n'
        self.tcl.eval(putsredef)
    def do_execute(self, code, silent, store_history=True,
                   user_expressions=None, allow_stdin=False):
        """Evaluate the cell's Tcl code; stream output to stdout, Tcl errors
        to stderr. Always reports status 'ok' to the frontend."""
        try:
            output = self.tcl.eval(code.rstrip())
            if not silent:
                # output[3:] strips the "=> " prefix added by the puts wrapper.
                stream_content = {'name': 'stdout', 'text': output[3:]}
                self.send_response(self.iopub_socket, 'stream', stream_content)
        except Tkinter.TclError as scripterr:
            output = "Tcl Error: " + scripterr.args[0]
            if not silent:
                stream_content = {
                    'name': 'stderr', 'text': output}
                self.send_response(self.iopub_socket, 'stream', stream_content)
        return {'status': 'ok', 'execution_count': self.execution_count,
                'payload': [], 'user_expressions': {}}
| from ipykernel.kernelbase import Kernel
try:
import Tkinter
except ImportError:
import tkinter as Tkinter
__version__ = '0.0.1'
class TclKernel(Kernel):
    """Jupyter kernel that evaluates notebook cells with a Tcl interpreter."""
    implementation = 'tcl_kernel'
    implementation_version = __version__
    # Fixed: this kernel runs Tcl, but language_info previously advertised
    # 'bash' with a 'shell' CodeMirror mode, so frontends highlighted and
    # labelled cells incorrectly.
    language_info = {'name': 'Tcl',
                     'codemirror_mode': 'Tcl',
                     'mimetype': 'text/x-script.tcl',
                     'file_extension': '.tcl'}
    banner = "Tcl Kernel"
    def __init__(self, **kwargs):
        Kernel.__init__(self, **kwargs)
        # One embedded Tcl interpreter per kernel instance.
        self.tcl = Tkinter.Tcl()
        self.execution_count = 0
        # Redefine Tcl's `puts`: single-argument calls return the value
        # prefixed with "=> " instead of writing to the real stdout.
        putsredef = 'rename puts original_puts \nproc puts {args} {\n    if {[llength $args] == 1} {\n        return "=> [lindex $args 0]"\n    } else {\n        eval original_puts $args\n    }\n}\n'
        self.tcl.eval(putsredef)
    def do_execute(self, code, silent, store_history=True,
                   user_expressions=None, allow_stdin=False):
        """Evaluate one cell; stream the result (or the Tcl error) back."""
        try:
            output = self.tcl.eval(code.rstrip())
            if not silent:
                # output[3:] strips the "=> " prefix added by the
                # redefined `puts` above.
                stream_content = {'name': 'stdout', 'text': output[3:]}
                self.send_response(self.iopub_socket, 'stream', stream_content)
        except Tkinter.TclError as scripterr:
            output = "Tcl Error: " + scripterr.args[0]
            if not silent:
                stream_content = {
                    'name': 'stderr', 'text': output}
                self.send_response(self.iopub_socket, 'stream', stream_content)
        return {'status': 'ok', 'execution_count': self.execution_count,
                'payload': [], 'user_expressions': {}}
| bsd-3-clause | Python |
c517eb40b73151a9b14f46f1991ab692d8b81702 | Add docstring for simulation class methods | kbsezginel/tee_mof,kbsezginel/tee_mof | teemof/simulation.py | teemof/simulation.py | # Date: August 2017
# Author: Kutay B. Sezginel
"""
Simulation class for reading and initializing Lammps simulations
"""
import pprint
from teemof.read import read_run, read_trial, read_trial_set
from teemof.parameters import k_parameters, plot_parameters
from teemof.visualize import plot_thermal_conductivity, plot_distance_histogram
class Simulation:
    """
    Reading and initializing Lammps simulations
    """
    def __init__(self, read=None, setup=None, parameters=None):
        """
        Create a Lammps simulation object.

        Args:
            read: directory with simulation results to load (optional).
            setup: layout of the results, one of 'run' | 'trial' | 'trial_set'.
            parameters: thermal conductivity parameters dict. Defaults to a
                fresh copy of k_parameters per instance. (The previous
                `parameters=k_parameters.copy()` default was evaluated once
                at import time, so all default-constructed simulations
                shared -- and could corrupt -- one dict.)
        """
        self.parameters = k_parameters.copy() if parameters is None else parameters
        self.plot_parameters = plot_parameters.copy()
        if read is not None and setup is not None:
            self.read(read, setup)
        self.setup = setup
    def read(self, sim_dir, setup='run'):
        """
        Read Lammps simulation results from given directory.

        Args:
            sim_dir: directory containing the simulation output.
            setup: 'run' | 'trial' | 'trial_set' selects the reader used
                and the attribute (run/trial/trial_set) populated.
        """
        self.setup = setup
        if setup == 'run':
            self.run = read_run(sim_dir, k_par=self.parameters)
        elif setup == 'trial':
            self.trial = read_trial(sim_dir, k_par=self.parameters)
        elif setup == 'trial_set':
            self.trial_set = read_trial_set(sim_dir, k_par=self.parameters)
        else:
            print('Select setup: "run" | "trial" | "trial_set"')
    def initialize(self):
        """
        Initialize input files for a Lammps simulation.
        """
        # Not implemented yet.
        pass
    def plot(self, selection):
        """
        Plot Lammps simulation results.

        Args:
            selection: 'k' plots thermal conductivity for each run of a
                trial; 'hist' plots a distance histogram.
        """
        if selection == 'k':
            plot_data = {}
            plot_data['x'] = self.trial['data']['Run1']['time']
            plot_data['y'] = [self.trial['data'][run]['k']['iso'] for run in self.trial['runs']]
            plot_data['legend'] = self.trial['runs']
            plot_thermal_conductivity(plot_data, self.plot_parameters['k'])
        elif selection == 'hist':
            plot_data = {}
            plot_distance_histogram(plot_data, self.plot_parameters['hist'])
        else:
            print('Select plot: "k" | "k_est" | "hist"')
    def show_parameters(self):
        """
        Show thermal conductivity parameters.
        """
        pprint.pprint(self.parameters)
    def show_plot_parameters(self):
        """
        Show plot parameters.
        """
        pprint.pprint(self.plot_parameters)
| # Date: August 2017
# Author: Kutay B. Sezginel
"""
Simulation class for reading and initializing Lammps simulations
"""
import pprint
from teemof.read import read_run, read_trial, read_trial_set
from teemof.parameters import k_parameters, plot_parameters
from teemof.visualize import plot_thermal_conductivity, plot_distance_histogram
class Simulation:
    """
    Reading and initializing Lammps simulations
    """
    def __init__(self, read=None, setup=None, parameters=k_parameters.copy()):
        """
        Create a Lammps simulation object.

        Args:
            read: directory with simulation results to load (optional).
            setup: layout of the results, one of 'run' | 'trial' | 'trial_set'.
            parameters: thermal conductivity parameters dict.
                NOTE(review): this default is evaluated once at import time,
                so default-constructed instances share a single dict --
                confirm whether a per-instance copy was intended.
        """
        self.parameters = parameters
        self.plot_parameters = plot_parameters.copy()
        if read is not None and setup is not None:
            self.read(read, setup)
        self.setup = setup
    def read(self, sim_dir, setup='run'):
        """
        Read Lammps simulation results from given directory.

        `setup` selects the reader and the attribute populated
        (run / trial / trial_set).
        """
        self.setup = setup
        if setup == 'run':
            self.run = read_run(sim_dir, k_par=self.parameters)
        elif setup == 'trial':
            self.trial = read_trial(sim_dir, k_par=self.parameters)
        elif setup == 'trial_set':
            self.trial_set = read_trial_set(sim_dir, k_par=self.parameters)
        else:
            print('Select setup: "run" | "trial" | "trial_set"')
    def initialize(self):
        """
        Initialize input files for a Lammps simulation (not implemented).
        """
        pass
    def plot(self, selection):
        """
        Plot simulation results: 'k' for thermal conductivity per run,
        'hist' for a distance histogram.
        """
        if selection == 'k':
            plot_data = {}
            plot_data['x'] = self.trial['data']['Run1']['time']
            plot_data['y'] = [self.trial['data'][run]['k']['iso'] for run in self.trial['runs']]
            plot_data['legend'] = self.trial['runs']
            plot_thermal_conductivity(plot_data, self.plot_parameters['k'])
        elif selection == 'hist':
            plot_data = {}
            plot_distance_histogram(plot_data, self.plot_parameters['hist'])
        else:
            print('Select plot: "k" | "k_est" | "hist"')
    def show_parameters(self):
        """
        Pretty-print the thermal conductivity parameters.
        """
        pprint.pprint(self.parameters)
    def show_plot_parameters(self):
        """
        Pretty-print the plot parameters.
        """
        pprint.pprint(self.plot_parameters)
| mit | Python |
b646e4f376db710101e2c1825bd384b2727e6a79 | Disable on win32 | stoq/kiwi | tests/test_dateentry.py | tests/test_dateentry.py | import sys
import datetime
import unittest
from kiwi.ui.dateentry import DateEntry
class TestDateEntry(unittest.TestCase):
    """Round-trip today's date through the DateEntry widget."""
    def setUp(self):
        self.date = datetime.date.today()
    def testGetSetDate(self):
        # Disabled on win32: the test body is skipped on that platform.
        if sys.platform == 'win32':
            return
        entry = DateEntry()
        entry.set_date(self.date)
        self.assertEqual(entry.get_date(), self.date)
if __name__ == '__main__':
unittest.main()
| import datetime
import unittest
from kiwi.ui.dateentry import DateEntry
class TestDateEntry(unittest.TestCase):
    """Round-trip today's date through the DateEntry widget."""
    def setUp(self):
        self.date = datetime.date.today()
    def testGetSetDate(self):
        # Disable on win32: the widget test cannot run on that platform.
        import sys  # local import keeps the module import block unchanged
        if sys.platform == 'win32':
            return
        entry = DateEntry()
        entry.set_date(self.date)
        self.assertEqual(entry.get_date(), self.date)
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | Python |
17ad68fe77b124fa760857c9e93cbd3d4f9d293e | Write XML of input file to tempdir as well | khaledhosny/psautohint,khaledhosny/psautohint | tests/test_hintfonts.py | tests/test_hintfonts.py | from __future__ import print_function, division, absolute_import
import glob
from os.path import basename
import pytest
from fontTools.misc.xmlWriter import XMLWriter
from fontTools.cffLib import CFFFontSet
from fontTools.ttLib import TTFont
from psautohint.autohint import ACOptions, hintFiles
from .differ import main as differ
from . import DATA_DIR
class Options(ACOptions):
    """Minimal ACOptions wrapper hinting one input file into one output."""
    def __init__(self, inpath, outpath):
        super(Options, self).__init__()
        self.inputPaths = [inpath]
        self.outputPaths = [outpath]
        self.hintAll = True
        self.verbose = False
@pytest.mark.parametrize("ufo", glob.glob("%s/*/*/font.ufo" % DATA_DIR))
def test_ufo(ufo, tmpdir):
    """Hint a UFO into tmpdir and diff it against the source UFO."""
    out = str(tmpdir / basename(ufo))
    options = Options(ufo, out)
    hintFiles(options)
    assert differ([ufo, out])
@pytest.mark.parametrize("otf", glob.glob("%s/*/*/font.otf" % DATA_DIR))
def test_otf(otf, tmpdir):
    """Hint an OTF and compare XML dumps of the CFF tables."""
    # ".out" keeps the hinted copy distinct from the input basename.
    out = str(tmpdir / basename(otf)) + ".out"
    options = Options(otf, out)
    hintFiles(options)
    # Dump each font's CFF table to XML inside tmpdir (never next to the
    # source data), then compare the two dumps.
    for path in (otf, out):
        font = TTFont(path)
        assert "CFF " in font
        writer = XMLWriter(str(tmpdir / basename(path)) + ".xml")
        font["CFF "].toXML(writer, font)
        del writer
        del font
    assert differ([str(tmpdir / basename(otf)) + ".xml",
                   str(tmpdir / basename(out)) + ".xml"])
@pytest.mark.parametrize("cff", glob.glob("%s/*/*/font.cff" % DATA_DIR))
def test_cff(cff, tmpdir):
    """Hint a bare CFF and compare XML dumps, as in test_otf."""
    out = str(tmpdir / basename(cff)) + ".out"
    options = Options(cff, out)
    hintFiles(options)
    for path in (cff, out):
        font = CFFFontSet()
        writer = XMLWriter(str(tmpdir / basename(path)) + ".xml")
        with open(path, "rb") as fp:
            font.decompile(fp, None)
        font.toXML(writer)
        del writer
        del font
    assert differ([str(tmpdir / basename(cff)) + ".xml",
                   str(tmpdir / basename(out)) + ".xml"])
| from __future__ import print_function, division, absolute_import
import glob
from os.path import basename
import pytest
from fontTools.misc.xmlWriter import XMLWriter
from fontTools.cffLib import CFFFontSet
from fontTools.ttLib import TTFont
from psautohint.autohint import ACOptions, hintFiles
from .differ import main as differ
from . import DATA_DIR
class Options(ACOptions):
    """Minimal ACOptions wrapper hinting one input file into one output."""
    def __init__(self, inpath, outpath):
        super(Options, self).__init__()
        self.inputPaths = [inpath]
        self.outputPaths = [outpath]
        self.hintAll = True
        self.verbose = False
@pytest.mark.parametrize("ufo", glob.glob("%s/*/*/font.ufo" % DATA_DIR))
def test_ufo(ufo, tmpdir):
    """Hint a UFO into tmpdir and diff it against the source UFO."""
    out = str(tmpdir / basename(ufo))
    options = Options(ufo, out)
    hintFiles(options)
    assert differ([ufo, out])
@pytest.mark.parametrize("otf", glob.glob("%s/*/*/font.otf" % DATA_DIR))
def test_otf(otf, tmpdir):
    """Hint an OTF and compare XML dumps of the CFF tables."""
    # Fixed: the hinted output and the XML dumps are now written inside
    # tmpdir. Previously `out` collided with the input basename and the
    # XML files were written next to the source data directory.
    out = str(tmpdir / basename(otf)) + ".out"
    options = Options(otf, out)
    hintFiles(options)
    for path in (otf, out):
        font = TTFont(path)
        assert "CFF " in font
        writer = XMLWriter(str(tmpdir / basename(path)) + ".xml")
        font["CFF "].toXML(writer, font)
        del writer
        del font
    assert differ([str(tmpdir / basename(otf)) + ".xml",
                   str(tmpdir / basename(out)) + ".xml"])
@pytest.mark.parametrize("cff", glob.glob("%s/*/*/font.cff" % DATA_DIR))
def test_cff(cff, tmpdir):
    """Hint a bare CFF and compare XML dumps, as in test_otf."""
    out = str(tmpdir / basename(cff)) + ".out"
    options = Options(cff, out)
    hintFiles(options)
    for path in (cff, out):
        font = CFFFontSet()
        writer = XMLWriter(str(tmpdir / basename(path)) + ".xml")
        with open(path, "rb") as fp:
            font.decompile(fp, None)
        font.toXML(writer)
        del writer
        del font
    assert differ([str(tmpdir / basename(cff)) + ".xml",
                   str(tmpdir / basename(out)) + ".xml"])
| apache-2.0 | Python |
6e67a9e8eedd959d9d0193e746a375099e9784ef | Use bytes instead of str where appropriate for Python 3 | mwilliamson/toodlepip | toodlepip/consoles.py | toodlepip/consoles.py | class Console(object):
    def __init__(self, shell, stdout):
        # `shell` runs commands; `stdout` is a binary stream (bytes writes).
        self._shell = shell
        self._stdout = stdout
    def run(self, description, command, **kwargs):
        """Run a single command (convenience wrapper around run_all)."""
        return self.run_all(description, [command], **kwargs)
    def run_all(self, description, commands, quiet=False, cwd=None):
        """Run *commands* in order, stopping at the first failure.

        Returns a Result carrying the first non-zero return code, or 0
        when every command succeeded.
        """
        # quiet=True discards command output instead of forwarding it.
        stdout = None if quiet else self._stdout
        # TODO: Test printing description
        # TODO: detect terminal
        # ANSI escapes: \033[1m = bold on, \033[0m = reset; the
        # description is written in bold, as bytes (Python 3 stream).
        self._stdout.write(b'\033[1m')
        self._stdout.write(description.encode("utf8"))
        self._stdout.write(b"\n")
        self._stdout.write(b'\033[0m')
        self._stdout.flush()
        for command in commands:
            # TODO: print command
            result = self._shell.run(
                command,
                stdout=stdout,
                stderr=stdout,
                cwd=cwd,
                allow_error=True
            )
            if result.return_code != 0:
                return Result(result.return_code)
        return Result(0)
class Result(object):
    """Outcome of a command sequence: the first non-zero return code, or 0."""
    def __init__(self, return_code):
        self.return_code = return_code
class Console(object):
    """Runs commands through a shell, echoing a bold description first."""
    def __init__(self, shell, stdout):
        # `shell` runs commands; `stdout` is a binary stream (bytes writes).
        self._shell = shell
        self._stdout = stdout
    def run(self, description, command, **kwargs):
        """Run a single command (convenience wrapper around run_all)."""
        return self.run_all(description, [command], **kwargs)
    def run_all(self, description, commands, quiet=False, cwd=None):
        """Run *commands* in order, stopping at the first failure.

        Returns a Result carrying the first non-zero return code, or 0.
        """
        stdout = None if quiet else self._stdout
        # TODO: Test printing description
        # TODO: detect terminal
        # Fixed for Python 3: write bytes, not str, to the binary output
        # stream (ANSI escapes \033[1m = bold on, \033[0m = reset).
        self._stdout.write(b'\033[1m')
        self._stdout.write(description.encode("utf8"))
        self._stdout.write(b"\n")
        self._stdout.write(b'\033[0m')
        self._stdout.flush()
        for command in commands:
            # TODO: print command
            result = self._shell.run(
                command,
                stdout=stdout,
                stderr=stdout,
                cwd=cwd,
                allow_error=True
            )
            if result.return_code != 0:
                return Result(result.return_code)
        return Result(0)
class Result(object):
    """Outcome of a command sequence: the first non-zero return code, or 0."""
    def __init__(self, return_code):
        self.return_code = return_code
| bsd-2-clause | Python |
006e933a44241e30e1e54c24966d0859aa7c853d | test hub via vanilla, to check imports | cablehead/vanilla | tests/unit/test_core.py | tests/unit/test_core.py | import time
import vanilla
import vanilla.core
def test_lazy():
    """lazy computes the attribute once and caches the value."""
    class C(object):
        @vanilla.core.lazy
        def now(self):
            return time.time()
    c = C()
    want = c.now
    time.sleep(0.01)
    # Unchanged after the delay: the first computed value was cached.
    assert c.now == want
def test_Scheduler():
    """Scheduler orders items by delay and reports the next timeout."""
    s = vanilla.core.Scheduler()
    s.add(4, 'f2')
    s.add(9, 'f4')
    s.add(3, 'f1')
    item3 = s.add(7, 'f3')
    # NOTE(review): `0.003 - s.timeout() < 0.001` only bounds the timeout
    # from below (it just means timeout > 0.002) -- confirm intent.
    assert 0.003 - s.timeout() < 0.001
    assert len(s) == 4
    s.remove(item3)
    assert 0.003 - s.timeout() < 0.001
    assert len(s) == 3
    # pop() returns the earliest item as (payload, args).
    assert s.pop() == ('f1', ())
    assert 0.004 - s.timeout() < 0.001
    assert len(s) == 2
    assert s.pop() == ('f2', ())
    assert 0.009 - s.timeout() < 0.001
    assert len(s) == 1
    assert s.pop() == ('f4', ())
    assert not s
class TestHub(object):
    """Hub scheduling behavior, exercised via the package-level Hub."""
    def test_spawn(self):
        h = vanilla.Hub()
        a = []
        h.spawn_later(10, lambda: a.append(1))
        h.spawn(lambda: a.append(2))
        h.sleep(1)
        # After sleeping 1, only the immediate spawn has run.
        assert a == [2]
        h.sleep(10)
        assert a == [2, 1]
    def test_exception(self):
        """An exception in a spawned task must not break the hub."""
        h = vanilla.Hub()
        def raiser():
            raise Exception()
        h.spawn(raiser)
        h.sleep(1)
        a = []
        h.spawn(lambda: a.append(2))
        h.sleep(1)
        assert a == [2]
    def test_stop(self):
        # stop() must return even with a task still sleeping.
        h = vanilla.Hub()
        @h.spawn
        def _():
            h.sleep(20)
        h.stop()
| import time
import vanilla.core
def test_lazy():
    """lazy computes the attribute once and caches the value."""
    class C(object):
        @vanilla.core.lazy
        def now(self):
            return time.time()
    c = C()
    want = c.now
    time.sleep(0.01)
    # Unchanged after the delay: the first computed value was cached.
    assert c.now == want
def test_Scheduler():
    """Scheduler orders items by delay and reports the next timeout."""
    s = vanilla.core.Scheduler()
    s.add(4, 'f2')
    s.add(9, 'f4')
    s.add(3, 'f1')
    item3 = s.add(7, 'f3')
    # NOTE(review): `0.003 - s.timeout() < 0.001` only bounds the timeout
    # from below (it just means timeout > 0.002) -- confirm intent.
    assert 0.003 - s.timeout() < 0.001
    assert len(s) == 4
    s.remove(item3)
    assert 0.003 - s.timeout() < 0.001
    assert len(s) == 3
    # pop() returns the earliest item as (payload, args).
    assert s.pop() == ('f1', ())
    assert 0.004 - s.timeout() < 0.001
    assert len(s) == 2
    assert s.pop() == ('f2', ())
    assert 0.009 - s.timeout() < 0.001
    assert len(s) == 1
    assert s.pop() == ('f4', ())
    assert not s
class TestHub(object):
    """Hub scheduling behavior (uses the internal vanilla.core.Hub)."""
    def test_spawn(self):
        h = vanilla.core.Hub()
        a = []
        h.spawn_later(10, lambda: a.append(1))
        h.spawn(lambda: a.append(2))
        h.sleep(1)
        # After sleeping 1, only the immediate spawn has run.
        assert a == [2]
        h.sleep(10)
        assert a == [2, 1]
    def test_exception(self):
        """An exception in a spawned task must not break the hub."""
        h = vanilla.core.Hub()
        def raiser():
            raise Exception()
        h.spawn(raiser)
        h.sleep(1)
        a = []
        h.spawn(lambda: a.append(2))
        h.sleep(1)
        assert a == [2]
    def test_stop(self):
        # stop() must return even with a task still sleeping.
        h = vanilla.core.Hub()
        @h.spawn
        def _():
            h.sleep(20)
        h.stop()
| mit | Python |
374e10b908fbedf73f3ad40634bb680206da0652 | Add setUp | yuma-m/pychord | test/test_quality.py | test/test_quality.py | # -*- coding: utf-8 -*-
import unittest
from pychord import QualityManager, Chord
class TestQuality(unittest.TestCase):
def setUp(self):
self.quality_manager = QualityManager()
def test_eq(self):
q1 = self.quality_manager.get_quality("m7-5")
q2 = self.quality_manager.get_quality("m7-5")
self.assertEqual(q1, q2)
def test_eq_alias_maj9(self):
q1 = self.quality_manager.get_quality("M9")
q2 = self.quality_manager.get_quality("maj9")
self.assertEqual(q1, q2)
def test_eq_alias_m7b5(self):
q1 = self.quality_manager.get_quality("m7-5")
q2 = self.quality_manager.get_quality("m7b5")
self.assertEqual(q1, q2)
def test_eq_alias_min(self):
q1 = self.quality_manager.get_quality("m")
q2 = self.quality_manager.get_quality("min")
q3 = self.quality_manager.get_quality("-")
self.assertEqual(q1, q2)
self.assertEqual(q1, q3)
def test_invalid_eq(self):
q = self.quality_manager.get_quality("m7")
with self.assertRaises(TypeError):
print(q == 0)
class TestQualityManager(unittest.TestCase):
def test_singleton(self):
quality_manager = QualityManager()
quality_manager2 = QualityManager()
self.assertIs(quality_manager, quality_manager2)
class TestOverwriteQuality(unittest.TestCase):
def setUp(self):
self.quality_manager = QualityManager()
def test_overwrite(self):
self.quality_manager.set_quality("11", (0, 4, 7, 10, 14, 17))
chord = Chord("C11")
self.assertEqual(chord.components(), ['C', 'E', 'G', 'Bb', 'D', 'F'])
def test_keep_existing_chord(self):
chord = Chord("C11")
self.quality_manager.set_quality("11", (0, 4, 7, 10, 14, 17))
self.assertEqual(chord.components(), ['C', 'G', 'Bb', 'D', 'F'])
if __name__ == '__main__':
unittest.main()
| # -*- coding: utf-8 -*-
import unittest
from pychord import QualityManager, Chord
class TestQuality(unittest.TestCase):
    """Equality semantics of Quality objects fetched from QualityManager."""
    def setUp(self):
        self.quality_manager = QualityManager()
    def test_eq(self):
        q1 = self.quality_manager.get_quality("m7-5")
        q2 = self.quality_manager.get_quality("m7-5")
        self.assertEqual(q1, q2)
    def test_eq_alias_maj9(self):
        # "M9" and "maj9" name the same quality.
        q1 = self.quality_manager.get_quality("M9")
        q2 = self.quality_manager.get_quality("maj9")
        self.assertEqual(q1, q2)
    def test_eq_alias_m7b5(self):
        q1 = self.quality_manager.get_quality("m7-5")
        q2 = self.quality_manager.get_quality("m7b5")
        self.assertEqual(q1, q2)
    def test_eq_alias_min(self):
        # "m", "min" and "-" are all aliases for minor.
        q1 = self.quality_manager.get_quality("m")
        q2 = self.quality_manager.get_quality("min")
        q3 = self.quality_manager.get_quality("-")
        self.assertEqual(q1, q2)
        self.assertEqual(q1, q3)
    def test_invalid_eq(self):
        # Comparing a Quality with a non-Quality raises TypeError.
        q = self.quality_manager.get_quality("m7")
        with self.assertRaises(TypeError):
            print(q == 0)
class TestQualityManager(unittest.TestCase):
    """QualityManager is a singleton."""
    def test_singleton(self):
        quality_manager = QualityManager()
        quality_manager2 = QualityManager()
        self.assertIs(quality_manager, quality_manager2)
class TestOverwriteQuality(unittest.TestCase):
    """set_quality affects chords built afterwards, not existing ones."""
    def setUp(self):
        # Hoisted the duplicated QualityManager() construction here, for
        # consistency with TestQuality (the manager is a singleton, so
        # behavior is unchanged).
        self.quality_manager = QualityManager()
    def test_overwrite(self):
        self.quality_manager.set_quality("11", (0, 4, 7, 10, 14, 17))
        chord = Chord("C11")
        self.assertEqual(chord.components(), ['C', 'E', 'G', 'Bb', 'D', 'F'])
    def test_keep_existing_chord(self):
        # A Chord created before the overwrite keeps its original notes.
        chord = Chord("C11")
        self.quality_manager.set_quality("11", (0, 4, 7, 10, 14, 17))
        self.assertEqual(chord.components(), ['C', 'G', 'Bb', 'D', 'F'])
if __name__ == '__main__':
unittest.main()
| mit | Python |
3707ed6b193a5eed9ec4505f6a283fdaff07ad5e | fix deprecated method | Mifiel/python-api-client | mifiel/api_auth.py | mifiel/api_auth.py | """
[ApiAuth](https://github.com/mgomes/api_auth) for python
Based on https://github.com/pd/httpie-api-auth by Kyle Hargraves
Usage:
import requests
requests.get(url, auth=ApiAuth(app_id, secret_key))
"""
import hmac, base64, hashlib, datetime
from requests.auth import AuthBase
from urllib.parse import urlparse
class ApiAuth(AuthBase):
    """requests authentication hook implementing ApiAuth HMAC signing.

    Fills in Content-MD5 and Date headers when missing, then signs a
    canonical string of the request with HMAC-SHA1 and stores the result
    in the Authorization header.
    """
    def __init__(self, access_id, secret_key):
        self.access_id = access_id
        self.secret_key = secret_key.encode('ascii')
    def __call__(self, request):
        # Invoked by requests on each prepared request; mutates the
        # headers in place and must return the request object.
        method = request.method.upper()
        content_type = request.headers.get('content-type')
        if not content_type:
            content_type = ''
        content_md5 = request.headers.get('content-md5')
        if not content_md5:
            # MD5 of the (possibly empty) body, base64-encoded.
            m = hashlib.md5()
            body = request.body
            if not body: body = ''
            m.update(body.encode('ascii'))
            content_md5 = base64.b64encode(m.digest()).decode()
            request.headers['content-md5'] = content_md5
        httpdate = request.headers.get('date')
        if not httpdate:
            # RFC 1123 style HTTP date in UTC.
            now = datetime.datetime.utcnow()
            httpdate = now.strftime('%a, %d %b %Y %H:%M:%S GMT')
            request.headers['Date'] = httpdate
        url = urlparse(request.url)
        path = url.path
        if url.query:
            path = path + '?' + url.query
        # Canonical string layout: METHOD,content-type,content-md5,path,date
        canonical_string = '%s,%s,%s,%s,%s' % (method, content_type, content_md5, path, httpdate)
        digest = hmac.new(
            self.secret_key,
            canonical_string.encode('ascii'),
            hashlib.sha1
        ).digest()
        signature = base64.encodebytes(digest).rstrip().decode()
        request.headers['Authorization'] = 'APIAuth %s:%s' % (self.access_id, signature)
        return request
| """
[ApiAuth](https://github.com/mgomes/api_auth) for python
Based on https://github.com/pd/httpie-api-auth by Kyle Hargraves
Usage:
import requests
requests.get(url, auth=ApiAuth(app_id, secret_key))
"""
import hmac, base64, hashlib, datetime
from requests.auth import AuthBase
from urllib.parse import urlparse
class ApiAuth(AuthBase):
    """requests authentication hook implementing ApiAuth HMAC signing.

    Fills in Content-MD5 and Date headers when missing, then signs a
    canonical string of the request with HMAC-SHA1 and stores the result
    in the Authorization header.
    """
    def __init__(self, access_id, secret_key):
        self.access_id = access_id
        self.secret_key = secret_key.encode('ascii')
    def __call__(self, request):
        # Invoked by requests on each prepared request; mutates the
        # headers in place and must return the request object.
        method = request.method.upper()
        content_type = request.headers.get('content-type')
        if not content_type:
            content_type = ''
        content_md5 = request.headers.get('content-md5')
        if not content_md5:
            # MD5 of the (possibly empty) body, base64-encoded.
            m = hashlib.md5()
            body = request.body
            if not body: body = ''
            m.update(body.encode('ascii'))
            content_md5 = base64.b64encode(m.digest()).decode()
            request.headers['content-md5'] = content_md5
        httpdate = request.headers.get('date')
        if not httpdate:
            now = datetime.datetime.utcnow()
            httpdate = now.strftime('%a, %d %b %Y %H:%M:%S GMT')
            request.headers['Date'] = httpdate
        url = urlparse(request.url)
        path = url.path
        if url.query:
            path = path + '?' + url.query
        # Canonical string layout: METHOD,content-type,content-md5,path,date
        canonical_string = '%s,%s,%s,%s,%s' % (method, content_type, content_md5, path, httpdate)
        digest = hmac.new(
            self.secret_key,
            canonical_string.encode('ascii'),
            hashlib.sha1
        ).digest()
        # Fixed: base64.encodestring was deprecated since Python 3.1 and
        # removed in 3.9; encodebytes is the drop-in replacement.
        signature = base64.encodebytes(digest).rstrip().decode()
        request.headers['Authorization'] = 'APIAuth %s:%s' % (self.access_id, signature)
        return request
| mit | Python |
c0894d3c14b8273364454dfa13c94311578ff698 | update for diverse usage | mykespb/pythoner,mykespb/pythoner,mykespb/pythoner | mk-1strecurring.py | mk-1strecurring.py | #!/usr/bin/env python3
# (C) Mikhail Kolodin, 2018, ver. 2018-05-31 1.1
# class ic test task: find 1st recurring character in a string
import random
import string
MINSIZE = 1 # min size of test string
MAXSIZE = 19 # its max size
TESTS = 10 # no of tests
alf = string.ascii_uppercase # test alphabet
arr = []
size = 0
def prepare():
    """Generate a random test string, storing it in globals arr/size."""
    global arr, size
    size = random.randint(MINSIZE, MAXSIZE)
    arr = "".join([random.choice(alf) for i in range(size)])
def solve(chars=None):
    """Return the first recurring character of *chars*, or "" if none.

    *chars* defaults to the module-level test string ``arr`` so existing
    zero-argument callers (show) keep working; passing a string makes the
    function reusable and testable on its own.
    """
    if chars is None:
        chars = arr
    seen = set()  # O(1) membership; the original scanned a growing string
    for c in chars:
        if c in seen:
            return c
        seen.add(c)
    return ""
def show():
    """find and show char, function to show result only"""
    # Map the empty "no repeat" result to a printable placeholder.
    c = solve()
    return c if c else "None"
def main():
    """run all"""
    # One fresh random string per test; print its size, content and result.
    for test in range(TESTS):
        prepare()
        print ("test =", test, ", size = %2d" % (size), ", arr =", arr.ljust(MAXSIZE), ", found recurrent:", show())
if __name__ == "__main__":
main()
| #!/usr/bin/env python3
# (C) Mikhail Kolodin, 2018, ver. 1.0
# class ic test task: find 1st recurring character in a string
import random
import string
MINSIZE = 1 # min size of test string
MAXSIZE = 9 # its max size
TESTS = 10 # no of tests
alf = string.ascii_uppercase # test alphabet
arr = []
size = 0
def prepare():
    """Generate a random test string, storing it in globals arr/size."""
    global arr, size
    size = random.randint(MINSIZE, MAXSIZE)
    arr = "".join([random.choice(alf) for i in range(size)])
def solve(chars=None):
    """Return the first recurring character, or "None" if all are unique.

    *chars* defaults to the module-level test string ``arr`` so the
    original zero-argument call in main() keeps working. (Removed the
    needless ``global arr`` -- reading a global requires no declaration.)
    """
    if chars is None:
        chars = arr
    seen = set()  # O(1) membership; the original scanned a growing string
    for c in chars:
        if c in seen:
            return c
        seen.add(c)
    return "None"
def main():
    """run all"""
    # Fixed: the original declared ``global arr, szie`` -- ``szie`` was a
    # typo for ``size``, and neither name is assigned here, so the
    # declaration was needless and misleading.
    for test in range(TESTS):
        prepare()
        print ("test =", test, ", size =", size, ", arr =", arr.ljust(MAXSIZE), ", found recurrent:", solve())
main()
| apache-2.0 | Python |
e379f35a15956204f09aa593979fe0a0186cf56e | Update the upload tool | vlegoff/cocomud | tools/upload_build.py | tools/upload_build.py | """This script upload a newly-build version of CocoMUD for Windows.
The Download wiki page on Redmine are updated.
Requirements:
This script needs 'python-redmine', which you can obtain with
pip install python-redmine
"""
import argparse
from json import dumps
import os
import re
import sys
from urllib import request
from redminelib import Redmine
from redminelib.exceptions import ResourceNotFoundError
# Create an argument parser
parser = argparse.ArgumentParser(
        description="upload a new CocoMUD build")
parser.add_argument("key", help="the API key to upload to Redmine")
args = parser.parse_args()
# Configure the system
key = args.key
# Connects to the REST API
redmine = Redmine("https://cocomud.plan.io", key=key)
# Check that the file exists
path = os.path.abspath("../src/build/CocoMUD.zip")
if not os.path.exists(path):
    print("The file {} cannot be found.".format(path))
    sys.exit(1)
# Then upload this file
print("Retrieving the Download wiki page on 'cocomud-client'...")
page = redmine.wiki_page.get("Download", project_id="cocomud-client")
print("Uploading {}...".format(path))
# Keep the page text unchanged while attaching the new build archive.
text = page.text
page.uploads = [{"path": path, "filename": "CocoMUD.zip"}]
page.text = text
print("Saving the page...", page.save())
# Get the new resource URL (the attachment just uploaded is the last one)
url = list(page.attachments)[-1].content_url
# Retrieve the version number from the right-hand side of "version = ..."
with open("../src/version.py", encoding="utf-8") as file:
    content = file.read()
    version = content.partition("=")[2].strip()
# Now we get ALL wiki pages with the title 'Download' and replace the URL
for project in redmine.project.all():
    identifier = project.identifier
    # Try to get the 'Download' page (projects without one are skipped)
    try:
        page = redmine.wiki_page.get("Download", project_id=identifier)
    except ResourceNotFoundError:
        pass
    else:
        print("Updating the Download page for the {} project...".format(
                identifier))
        # Rewrite the old attachment URL and the bold "+*version*+" marker.
        text = page.text
        text = re.sub(r"https\://cocomud\.plan\.io/attachments/" \
                r"download/\d+/CocoMUD\.zip", url, text)
        text = re.sub(r"\+\*\d+\*\+", "+*" + version + "*+", text)
        page.text = text
        success = page.save()
        if success:
            print("Correctly saved the wiki page.")
        else:
            print("Error while saving the wiki page.")
# Update the build information in the custom field
# NOTE(review): resource_id=2 and custom field id=3 are hard-coded Redmine
# identifiers -- confirm they match the target instance.
build = dumps({version: {"windows": url}})
print("Updating the custom field")
redmine.project.update(resource_id=2,
        custom_fields=[{"id": 3, "value": build}])
print("URL", url)
| """This script upload a newly-build version of CocoMUD for Windows.
The Download wiki page on Redmine are updated.
Requirements:
This script needs 'python-redmine', which you can obtain with
pip install python-redmine
"""
import argparse
from json import dumps
import os
import re
import sys
import urllib2
from redminelib import Redmine
from redminelib.exceptions import ResourceNotFoundError
# NOTE(review): this script is Python 2 (print statements, urllib2 import).
# Create an argument parser
parser = argparse.ArgumentParser(
        description="upload a new CocoMUD build")
parser.add_argument("key", help="the API key to upload to Redmine")
args = parser.parse_args()
# Configure the system
key = args.key
# Connects to the REST API
redmine = Redmine("https://cocomud.plan.io", key=key)
# Check that the file exists
path = os.path.abspath("../src/build/CocoMUD.zip")
if not os.path.exists(path):
    print "The file {} cannot be found.".format(path)
    sys.exit(1)
# Then upload this file
print "Retrieving the Download wiki page on 'cocomud-client'..."
page = redmine.wiki_page.get("Download", project_id="cocomud-client")
print "Uploading {}...".format(path)
# Keep the page text unchanged while attaching the new build archive.
text = page.text
page.uploads = [{"path": path, "filename": "CocoMUD.zip"}]
page.text = text
print "Saving the page...", page.save()
# Get the new resource URL (the attachment just uploaded is the last one)
url = list(page.attachments)[-1].content_url
# Retrieve the version number from the right-hand side of "version = ..."
with open("../src/version.py") as file:
    content = file.read()
    version = content.partition("=")[2].strip()
# Now we get ALL wiki pages with the title 'Download' and replace the URL
for project in redmine.project.all():
    identifier = project.identifier
    # Try to get the 'Download' page (projects without one are skipped)
    try:
        page = redmine.wiki_page.get("Download", project_id=identifier)
    except ResourceNotFoundError:
        pass
    else:
        print "Updating the Download page for the {} project...".format(
                identifier)
        # Rewrite the old attachment URL and the bold "+*version*+" marker.
        text = page.text
        text = re.sub(r"https\://cocomud\.plan\.io/attachments/" \
                r"download/\d+/CocoMUD\.zip", url, text)
        text = re.sub(r"\+\*\d+\*\+", "+*" + version + "*+", text)
        page.text = text
        success = page.save()
        if success:
            print "Correctly saved the wiki page."
        else:
            print "Error while saving the wiki page."
# Update the build information in the custom field
# NOTE(review): resource_id=2 and custom field id=3 are hard-coded Redmine
# identifiers -- confirm they match the target instance.
build = dumps({version: {"windows": url}})
print "Updating the custom field"
redmine.project.update(resource_id=2,
        custom_fields=[{"id": 3, "value": build}])
print "URL", url
| bsd-3-clause | Python |
3d331ecdb9cb0e64050eb3e4ece27242e1714b3e | Update C_Temperature_Vertical_sections.py | Herpinemmanuel/Oceanography | Cas_1/Temperature/C_Temperature_Vertical_sections.py | Cas_1/Temperature/C_Temperature_Vertical_sections.py | import numpy as np
import matplotlib.pyplot as plt
from xmitgcm import open_mdsdataset
plt.ion()
dir1 = '/homedata/bderembl/runmit/test_southatlgyre'
ds1 = open_mdsdataset(dir1,iters='all',prefix=['T'])
Height = ds1.T.Z
print(Height)
nx = int(len(ds1.T.XC)/2)
print(nx)
ny = int(len(ds1.T.YC)/2)
print(ny)
nt = -1
# Vertical Section of Temperature
plt.figure(1)
ds1['T'].where(ds1.hFacC>0)[nt,:,ny,:].plot()
plt.title('Case 1 : Temperature (t=-1 ; YC = 30S)')
plt.savefig('T_Temperature_Vertical_section_xz_cas1'+'.png')
plt.clf()
plt.figure(2)
ds1['T'].where(ds1.hFacC>0)[nt,:,:,nx].plot()
plt.title('Case 1 : Temperature (t=-1 ; XC = 0E)')
plt.savefig('T_Temperature_Vertical_section_yz_cas1'+'.png')
plt.clf()
import numpy as np
import matplotlib.pyplot as plt
from xmitgcm import open_mdsdataset
plt.ion()
# Case 1 simulation output directory.
dir1 = '/homedata/bderembl/runmit/test_southatlgyre'
ds1 = open_mdsdataset(dir1,iters='all',prefix=['T'])
Height = ds1.T.Z
print(Height)
# Mid-grid indices used to place the two vertical sections.
nx = int(len(ds1.T.XC)/2)
print(nx)
ny = int(len(ds1.T.YC)/2)
print(ny)
nt = -1  # last available time step
# Vertical Section of Temperature
plt.figure(1)
ds1['T'].where(ds1.hFacC>0)[nt,:,ny,:].plot()
plt.title('Case 1 : Temperature (t=-1 ; YC = 30S)')
# Fixed: save under the case-1 name so the file matches the plot title
# and the case-1 data directory (the files were saved as *_cas4).
plt.savefig('T_Temperature_Vertical_section_xz_cas1'+'.png')
plt.clf()
plt.figure(2)
ds1['T'].where(ds1.hFacC>0)[nt,:,:,nx].plot()
plt.title('Case 1 : Temperature (t=-1 ; XC = 0E)')
plt.savefig('T_Temperature_Vertical_section_yz_cas1'+'.png')
plt.clf()
| mit | Python |
b2542f8c3625150f9716eb0b1fcb44ee15520ae8 | fix path to nvim files | rr-/dotfiles,rr-/dotfiles,rr-/dotfiles | mod/vim/install.py | mod/vim/install.py | import packages
import util
def run():
spell_dir = '~/.config/vim/spell/'
choices = [
'vim',
'gvim', # gvim supports for X11 clipboard, but has more dependencies
]
choice = None
while choice not in choices:
choice = input('Which package to install? (%s) ' % choices).lower()
packages.try_install(choice)
packages.try_install('fzf')
for name in ['undo', 'backup', 'swap', 'spell', 'autoload']:
util.create_dir('~/.config/vim/' + name)
for path in util.find('./../nvim/*.vim'):
util.create_symlink(path, '~/.config/vim/')
util.create_symlink('./../nvim/spell/pl.utf-8.add', spell_dir)
util.create_symlink('./../nvim/spell/en.utf-8.add', spell_dir)
util.download(
'ftp://ftp.vim.org/pub/vim/runtime/spell/en.utf-8.spl',
'~/.config/vim/spell/')
util.download(
'ftp://ftp.vim.org/pub/vim/runtime/spell/pl.utf-8.spl',
'~/.config/vim/spell/')
util.download(
'https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim',
'~/.config/vim/autoload/plug.vim')
util.create_file(
'~/.config/zsh/editor.sh', 'export EDITOR=vim', overwrite=True)
util.create_symlink('~/.config/vim/', '~/.vim')
util.create_symlink('~/.config/vim/init.vim', '~/.vimrc')
commands = ['PlugInstall']
for path in util.find(spell_dir):
if 'add' in path and 'spl' not in path:
commands.append('mkspell! ' + path)
util.run_verbose(['vim'] + sum([['-c', cmd] for cmd in commands], []))
| import packages
import util
def run():
spell_dir = '~/.config/vim/spell/'
choices = [
'vim',
'gvim', # gvim supports for X11 clipboard, but has more dependencies
]
choice = None
while choice not in choices:
choice = input('Which package to install? (%s) ' % choices).lower()
packages.try_install(choice)
packages.try_install('fzf')
for name in ['undo', 'backup', 'swap', 'spell', 'autoload']:
util.create_dir('~/.config/vim/' + name)
for path in util.find('./../mod-nvim/*.vim'):
util.create_symlink(path, '~/.config/vim/')
util.create_symlink('./../mod-nvim/spell/pl.utf-8.add', spell_dir)
util.create_symlink('./../mod-nvim/spell/en.utf-8.add', spell_dir)
util.download(
'ftp://ftp.vim.org/pub/vim/runtime/spell/en.utf-8.spl',
'~/.config/vim/spell/')
util.download(
'ftp://ftp.vim.org/pub/vim/runtime/spell/pl.utf-8.spl',
'~/.config/vim/spell/')
util.download(
'https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim',
'~/.config/vim/autoload/plug.vim')
util.create_file(
'~/.config/zsh/editor.sh', 'export EDITOR=vim', overwrite=True)
util.create_symlink('~/.config/vim/', '~/.vim')
util.create_symlink('~/.config/vim/init.vim', '~/.vimrc')
commands = ['PlugInstall']
for path in util.find(spell_dir):
if 'add' in path and 'spl' not in path:
commands.append('mkspell! ' + path)
util.run_verbose(['vim'] + sum([['-c', cmd] for cmd in commands], []))
| mit | Python |
6d2e66ab5b9b452474701ffc5035e4a8106db637 | Add test_Record unit tests | NTUTVisualScript/Visual_Script,NTUTVisualScript/Visual_Script,NTUTVisualScript/Visual_Script,NTUTVisualScript/Visual_Script | tests/test_Record.py | tests/test_Record.py | import unittest
import os, shutil
from GeometrA.src.Record import *
from GeometrA.src.File.WorkSpace import WorkSpace
RECORD_FILE = './tests/record.log'
class RecordTestSuite(unittest.TestCase):
@classmethod
def setUpClass(cls):
path = './tests/Project0'
if os.path.isdir(path):
shutil.rmtree(path, True)
def setUp(self):
self.recordFile = './tests/record.log'
self.path = os.getcwd()
shutil.copytree('./tests/File/Project0', './tests/Project0')
def tearDown(self):
if os.path.isfile(self.recordFile):
os.remove(self.recordFile)
def tearDown(self):
path = './tests/Project0'
if os.path.isdir(path):
shutil.rmtree('path', True)
def testExportLog(self):
p = ['Project0', {'Project0':{'Suite1': ['case1', 'case2'],
'Suite2': ['case2']}}]
path = self.path
ws = WorkSpace(self.path, p)
exportLog(workspace = ws)
self.assertTrue(os.path.isfile(self.recordFile))
def testLog(self):
p = ['Project0', {'Project0':{'Suite1': ['case1', 'case2'],
'Suite2': ['case2']}}]
path = self.path
ws1 = WorkSpace(self.path, p)
exportLog(workspace = ws1)
ws = WorkSpace()
loadLog(ws)
log = [os.getcwd() + '/tests/Project0/Project0.json']
self.assertEqual(log, ws.log())
| # import unittest
#
# import os, shutil
#
# from GeometrA.src.Record import *
# from GeometrA.src.File.WorkSpace import WorkSpace
#
# RECORD_FILE = './tests/record.log'
#
# class RecordTestSuite(unittest.TestCase):
# @classmethod
# def setUpClass(cls):
# path = './tests/Project0'
# if os.path.isdir(path):
# shutil.rmtree(path, True)
#
# def setUp(self):
# self.recordFile = './tests/record.log'
# self.path = os.getcwd()
# shutil.copytree('./tests/File/Project0', './tests/Project0')
#
# def tearDown(self):
# if os.path.isfile(self.recordFile):
# os.remove(self.recordFile)
#
# def tearDown(self):
# path = './tests/Project0'
# if os.path.isdir(path):
# shutil.rmtree('path', True)
#
# def testExportLog(self):
# p = ['Project0', {'Project0':{'Suite1': ['case1', 'case2'],
# 'Suite2': ['case2']}}]
# path = self.path
# ws = WorkSpace(self.path, p)
#
# exportLog(workspace = ws)
# self.assertTrue(os.path.isfile(self.recordFile))
#
# def testLog(self):
# p = ['Project0', {'Project0':{'Suite1': ['case1', 'case2'],
# 'Suite2': ['case2']}}]
# path = self.path
# ws1 = WorkSpace(self.path, p)
#
# exportLog(workspace = ws1)
#
# ws = WorkSpace()
# loadLog(ws)
#
# log = [os.getcwd() + '/tests/Project0/Project0.json']
# self.assertEqual(log, ws.log())
| mit | Python |
3425c2c9d19c1d0a54dafde6cc70d571421c82a9 | Fix string app import error for python 3.5 | encode/uvicorn,encode/uvicorn | tests/test_config.py | tests/test_config.py | import logging
import socket
import pytest
from uvicorn import protocols
from uvicorn.config import Config
from uvicorn.middleware.debug import DebugMiddleware
from uvicorn.middleware.wsgi import WSGIMiddleware
async def asgi_app():
pass
def wsgi_app():
pass
def test_debug_app():
config = Config(app=asgi_app, debug=True)
config.load()
assert config.debug is True
assert isinstance(config.loaded_app, DebugMiddleware)
def test_wsgi_app():
config = Config(app=wsgi_app, interface="wsgi")
config.load()
assert isinstance(config.loaded_app, WSGIMiddleware)
assert config.interface == "wsgi"
def test_proxy_headers():
config = Config(app=asgi_app, proxy_headers=True)
config.load()
assert config.proxy_headers is True
def test_app_unimportable():
config = Config(app="no.such:app")
with pytest.raises(ImportError):
config.load()
def test_concrete_http_class():
config = Config(app=asgi_app, http=protocols.http.h11_impl.H11Protocol)
config.load()
assert config.http_protocol_class is protocols.http.h11_impl.H11Protocol
def test_logger():
logger = logging.getLogger("just-for-tests")
config = Config(app=asgi_app, logger=logger)
config.load()
assert config.logger is logger
def test_socket_bind():
config = Config(app=asgi_app)
config.load()
assert isinstance(config.bind_socket(), socket.socket)
def test_ssl_config(certfile_and_keyfile):
certfile, keyfile = certfile_and_keyfile
config = Config(app=asgi_app, ssl_certfile=certfile.name, ssl_keyfile=keyfile.name)
config.load()
assert config.is_ssl is True
| import logging
import socket
import pytest
from uvicorn import protocols
from uvicorn.config import Config
from uvicorn.middleware.debug import DebugMiddleware
from uvicorn.middleware.wsgi import WSGIMiddleware
async def asgi_app():
pass
def wsgi_app():
pass
def test_debug_app():
config = Config(app=asgi_app, debug=True)
config.load()
assert config.debug is True
assert isinstance(config.loaded_app, DebugMiddleware)
def test_wsgi_app():
config = Config(app=wsgi_app, interface="wsgi")
config.load()
assert isinstance(config.loaded_app, WSGIMiddleware)
assert config.interface == "wsgi"
def test_proxy_headers():
config = Config(app=asgi_app, proxy_headers=True)
config.load()
assert config.proxy_headers is True
def test_app_unimportable():
config = Config(app="no.such:app")
with pytest.raises(ModuleNotFoundError):
config.load()
def test_concrete_http_class():
config = Config(app=asgi_app, http=protocols.http.h11_impl.H11Protocol)
config.load()
assert config.http_protocol_class is protocols.http.h11_impl.H11Protocol
def test_logger():
logger = logging.getLogger("just-for-tests")
config = Config(app=asgi_app, logger=logger)
config.load()
assert config.logger is logger
def test_socket_bind():
config = Config(app=asgi_app)
config.load()
assert isinstance(config.bind_socket(), socket.socket)
def test_ssl_config(certfile_and_keyfile):
certfile, keyfile = certfile_and_keyfile
config = Config(app=asgi_app, ssl_certfile=certfile.name, ssl_keyfile=keyfile.name)
config.load()
assert config.is_ssl is True
| bsd-3-clause | Python |
1df8efb63333e89777820a96d78d5a59252b303d | Rename test specific to with gpg | theherk/figgypy | tests/test_config.py | tests/test_config.py | import unittest
import figgypy.config
import sys
import os
class TestConfig(unittest.TestCase):
def test_config_load_with_gpg(self):
os.environ['FIGGY_GPG_HOME']='tests/resources/test-keys'
c = figgypy.config.Config('tests/resources/test-config.yaml')
self.assertEqual(c.db['host'], 'db.heck.ya')
self.assertEqual(c.db['pass'], 'test password')
if __name__ == '__main__':
unittest.main()
| import unittest
import figgypy.config
import sys
import os
class TestConfig(unittest.TestCase):
def test_config_load(self):
os.environ['FIGGY_GPG_HOME']='tests/resources/test-keys'
c = figgypy.config.Config('tests/resources/test-config.yaml')
self.assertEqual(c.db['host'], 'db.heck.ya')
self.assertEqual(c.db['pass'], 'test password')
if __name__ == '__main__':
unittest.main()
| mit | Python |
66eddf04efd46fb3dbeae34c4d82f673a88be70f | Test the ability to add phone to the person | dizpers/python-address-book-assignment | tests/test_person.py | tests/test_person.py | from copy import copy
from unittest import TestCase
from address_book import Person
class PersonTestCase(TestCase):
def test_get_groups(self):
pass
def test_add_address(self):
basic_address = ['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42']
person = Person(
'John',
'Doe',
copy(basic_address),
['+79834772053'],
['john@gmail.com']
)
person.add_address('new address')
self.assertEqual(
person.addresses,
basic_address + ['new address']
)
def test_add_phone(self):
basic_phone = ['+79237778492']
person = Person(
'John',
'Doe',
copy(basic_phone),
['+79834772053'],
['john@gmail.com']
)
person.add_phone('+79234478810')
self.assertEqual(
person.addresses,
basic_phone + ['+79234478810']
)
def test_add_email(self):
pass | from copy import copy
from unittest import TestCase
from address_book import Person
class PersonTestCase(TestCase):
def test_get_groups(self):
pass
def test_add_address(self):
basic_address = ['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42']
person = Person(
'John',
'Doe',
copy(basic_address),
['+79834772053'],
['john@gmail.com']
)
person.add_address('new address')
self.assertEqual(
person.addresses,
basic_address + ['new address']
)
def test_add_phone(self):
pass
def test_add_email(self):
pass | mit | Python |
5017ee713fd03902aa502836654e1961fb7575f1 | test form action url | satyrius/cmsplugin-feedback,satyrius/cmsplugin-feedback | tests/test_plugin.py | tests/test_plugin.py | from bs4 import BeautifulSoup
from cms.api import add_plugin
from cms.models import Placeholder
from django.core.urlresolvers import reverse
from django.test import TestCase
from cmsplugin_feedback.cms_plugins import FeedbackPlugin, \
DEFAULT_FORM_FIELDS_ID, DEFAULT_FORM_CLASS
from cmsplugin_feedback.forms import FeedbackMessageForm
class FeedbackPluginTests(TestCase):
def setUp(self):
self.placeholder = Placeholder.objects.create(slot='test')
def add_plugin(self, **kwargs):
model_instance = add_plugin(
self.placeholder,
FeedbackPlugin,
'en',
**kwargs)
return model_instance
def test_plugin_context(self):
model = self.add_plugin()
plugin = model.get_plugin_class_instance()
context = plugin.render({}, model, None)
self.assertIn('form', context)
self.assertIsInstance(context['form'], FeedbackMessageForm)
self.assertEqual(context['form'].auto_id, DEFAULT_FORM_FIELDS_ID)
self.assertIn('form_class', context)
self.assertEqual(context['form_class'], DEFAULT_FORM_CLASS)
def test_form_title(self):
title = 'Feedback Form'
plugin = self.add_plugin(title=title)
html = plugin.render_plugin({})
soup = BeautifulSoup(html)
self.assertEqual(soup.h1.string, title)
def test_default_submit_button(self):
plugin = self.add_plugin()
self.assertTrue(plugin.submit)
default = plugin._meta.get_field_by_name('submit')[0].default
self.assertEqual(plugin.submit, default)
html = plugin.render_plugin({})
soup = BeautifulSoup(html)
self.assertEqual(soup.find(type='submit').string, default)
def test_submit_button(self):
text = 'Send'
plugin = self.add_plugin(submit=text)
default = plugin._meta.get_field_by_name('submit')[0].default
self.assertNotEqual(text, default)
self.assertEqual(plugin.submit, text)
html = plugin.render_plugin({})
soup = BeautifulSoup(html)
self.assertEqual(soup.find(type='submit').string, text)
def test_form_action_url(self):
plugin = self.add_plugin()
html = plugin.render_plugin({})
soup = BeautifulSoup(html)
self.assertEqual(
soup.form['action'],
reverse('feedback-form', args=[plugin.id]))
| from bs4 import BeautifulSoup
from cms.api import add_plugin
from cms.models import Placeholder
from django.test import TestCase
from cmsplugin_feedback.cms_plugins import FeedbackPlugin, \
DEFAULT_FORM_FIELDS_ID, DEFAULT_FORM_CLASS
from cmsplugin_feedback.forms import FeedbackMessageForm
class FeedbackPluginTests(TestCase):
def setUp(self):
self.placeholder = Placeholder.objects.create(slot='test')
def add_plugin(self, **kwargs):
model_instance = add_plugin(
self.placeholder,
FeedbackPlugin,
'en',
**kwargs)
return model_instance
def test_plugin_context(self):
model = self.add_plugin()
plugin = model.get_plugin_class_instance()
context = plugin.render({}, model, None)
self.assertIn('form', context)
self.assertIsInstance(context['form'], FeedbackMessageForm)
self.assertEqual(context['form'].auto_id, DEFAULT_FORM_FIELDS_ID)
self.assertIn('form_class', context)
self.assertEqual(context['form_class'], DEFAULT_FORM_CLASS)
def test_form_title(self):
title = 'Feedback Form'
plugin = self.add_plugin(title=title)
html = plugin.render_plugin({})
soup = BeautifulSoup(html)
self.assertEqual(soup.h1.string, title)
def test_default_submit_button(self):
plugin = self.add_plugin()
self.assertTrue(plugin.submit)
default = plugin._meta.get_field_by_name('submit')[0].default
self.assertEqual(plugin.submit, default)
html = plugin.render_plugin({})
soup = BeautifulSoup(html)
self.assertEqual(soup.find(type='submit').string, default)
def test_submit_button(self):
text = 'Send'
plugin = self.add_plugin(submit=text)
default = plugin._meta.get_field_by_name('submit')[0].default
self.assertNotEqual(text, default)
self.assertEqual(plugin.submit, text)
html = plugin.render_plugin({})
soup = BeautifulSoup(html)
self.assertEqual(soup.find(type='submit').string, text)
| mit | Python |
28f6af7f84860535a1a82750df286f78320a6856 | Fix monkeypatching | audiolabs/stft | tests/test_things.py | tests/test_things.py | from __future__ import division
import stft
import numpy
import pytest
@pytest.fixture(params=[1, 2])
def channels(request):
return request.param
@pytest.fixture(params=[0, 1, 4])
def padding(request):
return request.param
@pytest.fixture(params=[2048])
def length(request):
return request.param
@pytest.fixture
def signal(channels, length):
return numpy.squeeze(numpy.random.random((length, channels)))
@pytest.fixture(params=[512])
def framelength(request):
return request.param
def test_shape(length, framelength):
a = numpy.squeeze(numpy.random.random((length, 1)))
x = stft.spectrogram(a, framelength=framelength, halved=True)
assert x.shape[0] == framelength / 2 + 1
x_2 = stft.spectrogram(a, framelength=framelength, halved=False)
assert x_2.shape[0] == framelength
def test_windowlength_errors():
"""
Test if way too short signals can be transformed
"""
siglen = 512
framelen = 2048
stft.spectrogram(numpy.random.random(siglen), framelength=framelen)
def test_precision(channels, padding, signal, framelength):
"""
Test if transform-inverse identity holds
"""
a = signal
x = stft.spectrogram(a, framelength=framelength, padding=padding)
y = stft.ispectrogram(x, framelength=framelength, padding=padding)
# Crop first and last frame
assert numpy.allclose(a, y)
def test_rms(channels, padding, signal, framelength):
"""
Test if transform-inverse identity holds
"""
a = signal
x = stft.spectrogram(a, framelength=framelength, padding=padding)
y = stft.ispectrogram(x, framelength=framelength, padding=padding)
# Crop first and last frame
assert numpy.sqrt(numpy.mean((a - y) ** 2)) < 1e-8
def test_maxdim():
a = numpy.random.random((512, 2, 2))
with pytest.raises(ValueError):
stft.spectrogram(a)
b = numpy.random.random((512, 2, 2, 3))
with pytest.raises(ValueError):
stft.ispectrogram(b)
def test_issue1():
a = numpy.random.random((512, 1))
b = stft.spectrogram(a)
assert b.ndim == 2
def raiser(*args):
raise AttributeError
def test_fallback(monkeypatch):
# Try monkeypatching signal.cosine away.
# Ignore AttributeErrors during monkeypatching, for older scipy versions
import scipy.signal
try:
monkeypatch.setattr("scipy.signal.cosine", raiser)
except Exception:
pass
return test_windowlength_errors()
| from __future__ import division
import stft
import numpy
import pytest
@pytest.fixture(params=[1, 2])
def channels(request):
return request.param
@pytest.fixture(params=[0, 1, 4])
def padding(request):
return request.param
@pytest.fixture(params=[2048])
def length(request):
return request.param
@pytest.fixture
def signal(channels, length):
return numpy.squeeze(numpy.random.random((length, channels)))
@pytest.fixture(params=[512])
def framelength(request):
return request.param
def test_shape(length, framelength):
a = numpy.squeeze(numpy.random.random((length, 1)))
x = stft.spectrogram(a, framelength=framelength, halved=True)
assert x.shape[0] == framelength / 2 + 1
x_2 = stft.spectrogram(a, framelength=framelength, halved=False)
assert x_2.shape[0] == framelength
def test_windowlength_errors():
"""
Test if way too short signals can be transformed
"""
siglen = 512
framelen = 2048
stft.spectrogram(numpy.random.random(siglen), framelength=framelen)
def test_precision(channels, padding, signal, framelength):
"""
Test if transform-inverse identity holds
"""
a = signal
x = stft.spectrogram(a, framelength=framelength, padding=padding)
y = stft.ispectrogram(x, framelength=framelength, padding=padding)
# Crop first and last frame
assert numpy.allclose(a, y)
def test_rms(channels, padding, signal, framelength):
"""
Test if transform-inverse identity holds
"""
a = signal
x = stft.spectrogram(a, framelength=framelength, padding=padding)
y = stft.ispectrogram(x, framelength=framelength, padding=padding)
# Crop first and last frame
assert numpy.sqrt(numpy.mean((a - y) ** 2)) < 1e-8
def test_maxdim():
a = numpy.random.random((512, 2, 2))
with pytest.raises(ValueError):
stft.spectrogram(a)
b = numpy.random.random((512, 2, 2, 3))
with pytest.raises(ValueError):
stft.ispectrogram(b)
def test_issue1():
a = numpy.random.random((512, 1))
b = stft.spectrogram(a)
assert b.ndim == 2
def raiser(*args):
raise AttributeError
def test_fallback(monkeypatch):
# Try monkeypatching signal.cosine away.
# Ignore AttributeErrors during monkeypatching, for older scipy versions
try:
import scipy.signal
monkeypatch.setattr("scipy.signal.cosine", raiser)
except AttributeError:
pass
return test_windowlength_errors()
| mit | Python |
36200dea5889bdf4ad920adc1ab04ae3870f74ac | Edit varnet model (#5096) | Project-MONAI/MONAI,Project-MONAI/MONAI,Project-MONAI/MONAI,Project-MONAI/MONAI | tests/test_varnet.py | tests/test_varnet.py | # Copyright (c) MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import torch
from parameterized import parameterized
from monai.apps.reconstruction.networks.nets.coil_sensitivity_model import CoilSensitivityModel
from monai.apps.reconstruction.networks.nets.complex_unet import ComplexUnet
from monai.apps.reconstruction.networks.nets.varnet import VariationalNetworkModel
from monai.networks import eval_mode
from tests.utils import test_script_save
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
coil_sens_model = CoilSensitivityModel(spatial_dims=2, features=[8, 16, 32, 64, 128, 8])
refinement_model = ComplexUnet(spatial_dims=2, features=[8, 16, 32, 64, 128, 8])
num_cascades = 2
TESTS = []
TESTS.append([coil_sens_model, refinement_model, num_cascades, (1, 3, 50, 50, 2), (1, 50, 50)]) # batch=1
TESTS.append([coil_sens_model, refinement_model, num_cascades, (2, 3, 50, 50, 2), (2, 50, 50)]) # batch=2
class TestVarNet(unittest.TestCase):
@parameterized.expand(TESTS)
def test_shape(self, coil_sens_model, refinement_model, num_cascades, input_shape, expected_shape):
net = VariationalNetworkModel(coil_sens_model, refinement_model, num_cascades).to(device)
mask_shape = [1 for _ in input_shape]
mask_shape[-2] = input_shape[-2]
mask = torch.zeros(mask_shape)
mask[..., mask_shape[-2] // 2 - 5 : mask_shape[-2] // 2 + 5, :] = 1
with eval_mode(net):
result = net(torch.randn(input_shape).to(device), mask.bool().to(device))
self.assertEqual(result.shape, expected_shape)
@parameterized.expand(TESTS)
def test_script(self, coil_sens_model, refinement_model, num_cascades, input_shape, expected_shape):
net = VariationalNetworkModel(coil_sens_model, refinement_model, num_cascades)
mask_shape = [1 for _ in input_shape]
mask_shape[-2] = input_shape[-2]
mask = torch.zeros(mask_shape)
mask[..., mask_shape[-2] // 2 - 5 : mask_shape[-2] // 2 + 5, :] = 1
test_data = torch.randn(input_shape)
test_script_save(net, test_data, mask.bool())
if __name__ == "__main__":
unittest.main()
| # Copyright (c) MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import torch
from parameterized import parameterized
from monai.apps.reconstruction.networks.nets.coil_sensitivity_model import CoilSensitivityModel
from monai.apps.reconstruction.networks.nets.complex_unet import ComplexUnet
from monai.apps.reconstruction.networks.nets.varnet import VariationalNetworkModel
from monai.networks import eval_mode
from tests.utils import test_script_save
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
coil_sens_model = CoilSensitivityModel(spatial_dims=2, features=[8, 16, 32, 64, 128, 8])
refinement_model = ComplexUnet(spatial_dims=2, features=[8, 16, 32, 64, 128, 8])
num_cascades = 12
TESTS = []
TESTS.append([coil_sens_model, refinement_model, num_cascades, (1, 10, 300, 200, 2), (1, 300, 200)]) # batch=1
TESTS.append([coil_sens_model, refinement_model, num_cascades, (2, 10, 300, 200, 2), (2, 300, 200)]) # batch=2
class TestVarNet(unittest.TestCase):
@parameterized.expand(TESTS)
def test_shape(self, coil_sens_model, refinement_model, num_cascades, input_shape, expected_shape):
net = VariationalNetworkModel(coil_sens_model, refinement_model, num_cascades).to(device)
mask_shape = [1 for _ in input_shape]
mask_shape[-2] = input_shape[-2]
mask = torch.zeros(mask_shape)
mask[..., mask_shape[-2] // 2 - 5 : mask_shape[-2] // 2 + 5, :] = 1
with eval_mode(net):
result = net(torch.randn(input_shape).to(device), mask.byte().to(device))
self.assertEqual(result.shape, expected_shape)
@parameterized.expand(TESTS)
def test_script(self, coil_sens_model, refinement_model, num_cascades, input_shape, expected_shape):
net = VariationalNetworkModel(coil_sens_model, refinement_model, num_cascades)
mask_shape = [1 for _ in input_shape]
mask_shape[-2] = input_shape[-2]
mask = torch.zeros(mask_shape)
mask[..., mask_shape[-2] // 2 - 5 : mask_shape[-2] // 2 + 5, :] = 1
test_data = torch.randn(input_shape)
test_script_save(net, test_data, mask.byte())
if __name__ == "__main__":
unittest.main()
| apache-2.0 | Python |
3e84dcb7b449db89ca6ce2b91b34a5e8f8428b39 | Allow sub- and superscript tags | stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten | core/markdown.py | core/markdown.py | from markdown.extensions import nl2br, sane_lists, fenced_code
from pymdownx import magiclink
from mdx_unimoji import UnimojiExtension
import utils.markdown
markdown_extensions = [
magiclink.MagiclinkExtension(),
nl2br.Nl2BrExtension(),
utils.markdown.ExtendedLinkExtension(),
sane_lists.SaneListExtension(),
fenced_code.FencedCodeExtension(),
utils.markdown.CuddledListExtension(),
UnimojiExtension()
]
content_allowed_tags = (
# text
'p', 'em', 'strong', 'br', 'a', 'img', 'sub', 'sup',
# citation
'blockquote', 'cite',
# headings
'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
# lists
'ol', 'ul', 'li',
# code
'pre', 'code'
)
content_allowed_attributes = {
'*': ['id', 'title'],
'a': ['href', 'title', 'data-component', 'data-grouplink-ref'],
'code': ['class'],
'img': ['src', 'alt']
}
| from markdown.extensions import nl2br, sane_lists, fenced_code
from pymdownx import magiclink
from mdx_unimoji import UnimojiExtension
import utils.markdown
markdown_extensions = [
magiclink.MagiclinkExtension(),
nl2br.Nl2BrExtension(),
utils.markdown.ExtendedLinkExtension(),
sane_lists.SaneListExtension(),
fenced_code.FencedCodeExtension(),
utils.markdown.CuddledListExtension(),
UnimojiExtension()
]
content_allowed_tags = (
# text
'p', 'em', 'strong', 'br', 'a', 'img',
# citation
'blockquote', 'cite',
# headings
'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
# lists
'ol', 'ul', 'li',
# code
'pre', 'code'
)
content_allowed_attributes = {
'*': ['id', 'title'],
'a': ['href', 'title', 'data-component', 'data-grouplink-ref'],
'code': ['class'],
'img': ['src', 'alt']
}
| agpl-3.0 | Python |
b3c55b059293d664d3e029b9c3d03203ff4af5a5 | remove ws | iivvoo/resturo | resturo/tests/models.py | resturo/tests/models.py | from ..models import Organization as BaseOrganization
from ..models import Membership as BaseMembership
class Organization(BaseOrganization):
"""
"""
class Membership(BaseMembership):
""" Provide non-abstract implementation for Membership model,
define some roles
"""
ROLE_MEMBER = 1
| from ..models import Organization as BaseOrganization
from ..models import Membership as BaseMembership
class Organization(BaseOrganization):
"""
"""
class Membership(BaseMembership):
""" Provide non-abstract implementation for Membership model,
define some roles
"""
ROLE_MEMBER = 1
| isc | Python |
32dd33126c9fa0076c8d7c9e8024a709674f8614 | Bump Version 0.0.28 -> 0.0.29 | 3bot/3bot,3bot/3bot | threebot/__init__.py | threebot/__init__.py | # -*- encoding: utf-8 -*-
__version__ = '0.0.29'
| # -*- encoding: utf-8 -*-
__version__ = '0.0.28'
| bsd-3-clause | Python |
363583654998e404baba9b72860d2465bb3d339e | Remove convoluted meshgrid statement. | matthewwardrop/python-mplkit,matthewwardrop/python-mplkit,matthewwardrop/python-mplstyles,matthewwardrop/python-mplstyles | mplstyles/plots.py | mplstyles/plots.py | from matplotlib import cm
import matplotlib.pyplot as plt
from mplstyles import cmap as colormap
import numpy as np
import scipy.ndimage
def contour_image(x,y,Z,cmap=None,vmax=None,vmin=None,interpolation='nearest',contour_smoothing=0,contour_opts={},label_opts={},imshow_opts={},clegendlabels=[],label=False):
ax = plt.gca()
x_delta = float((x[-1]-x[0]))/(len(x)-1)/2.
y_delta = float((y[-1]-y[0]))/(len(y)-1)/2.
extent=(x[0],x[-1],y[0],y[-1])
extent_delta = (x[0]-x_delta,x[-1]+x_delta,y[0]-y_delta,y[-1]+y_delta)
ax.set_xlim(x[0],x[-1])
ax.set_ylim(y[0],y[-1])
if cmap is None:
cmap = colormap.reverse(cm.Blues)
Z = Z.transpose()
#plt.contourf(X,Y,self.pdata,interpolation=interpolation)
cs = ax.imshow(Z,interpolation=interpolation,origin='lower',aspect='auto',extent=extent_delta,cmap=cmap,vmax=vmax,vmin=vmin, **imshow_opts)
# Draw contours
if contour_smoothing != 0:
Z = scipy.ndimage.zoom(Z, contour_smoothing)
X, Y = np.meshgrid(x, y)
CS = ax.contour(X, Y, Z, extent=extent, origin='lower', **contour_opts )
# Label contours
if label:
ax.clabel(CS, **label_opts)
# Show contours in legend if desired
if len(clegendlabels) > 0:
for i in range(len(clegendlabels)):
CS.collections[i].set_label(clegendlabels[i])
#ax.legend()
return cs, CS
| from matplotlib import cm
import matplotlib.pyplot as plt
from mplstyles import cmap as colormap
import numpy as np
import scipy.ndimage
def contour_image(x,y,Z,cmap=None,vmax=None,vmin=None,interpolation='nearest',contour_smoothing=0,contour_opts={},label_opts={},imshow_opts={},clegendlabels=[],label=False):
ax = plt.gca()
x_delta = float((x[-1]-x[0]))/(len(x)-1)/2.
y_delta = float((y[-1]-y[0]))/(len(y)-1)/2.
extent=(x[0],x[-1],y[0],y[-1])
extent_delta = (x[0]-x_delta,x[-1]+x_delta,y[0]-y_delta,y[-1]+y_delta)
ax.set_xlim(x[0],x[-1])
ax.set_ylim(y[0],y[-1])
if cmap is None:
cmap = colormap.reverse(cm.Blues)
Z = Z.transpose()
#plt.contourf(X,Y,self.pdata,interpolation=interpolation)
cs = ax.imshow(Z,interpolation=interpolation,origin='lower',aspect='auto',extent=extent_delta,cmap=cmap,vmax=vmax,vmin=vmin, **imshow_opts)
# Draw contours
if contour_smoothing != 0:
Z = scipy.ndimage.zoom(Z, contour_smoothing)
X, Y = np.meshgrid(np.linspace(x[0],x[-1],Z.shape[1]), np.linspace(y[0],y[-1],Z.shape[0]))
CS = ax.contour(X, Y, Z, extent=extent, origin='lower', **contour_opts )
# Label contours
if label:
ax.clabel(CS, **label_opts)
# Show contours in legend if desired
if len(clegendlabels) > 0:
for i in range(len(clegendlabels)):
CS.collections[i].set_label(clegendlabels[i])
#ax.legend()
return cs, CS
| mit | Python |
7177f7e0263d8a5f2adf458f9bfe33bff12137e0 | fix syntax error | silshack/fall2013turtlehack | n_sided_polygon.py | n_sided_polygon.py | import turtle
import turtlehack
import random
# A function that draws an n-sided polygon
def n_sided_polygon(turtle, n, color="#FFFFFF", line_thickness=1):
'''
Draw an n-sided polygon
input: turtle, n, line_length
'''
# for n times:
# Draw a line, then turn 360/n degrees and draw another
# set initial parameters
turtle.degrees()
line_length=80
turtle.pensize(line_thickness)
turn_angle = (360/n)
i = 1
# Draw each line segment and turn
while (i <= n):
turtle.color(color)
turtle.pendown()
turtle.forward(line_length)
turtle.penup()
turtle.right(turn_angle)
i += 1
return 0
## MAIN ##
# set initial parameters
n=random.randint(3,12)
# create the Turle instance
graphic = turtle.Turtle()
# Call the polygon code
n_sided_polygon(graphic, n, turtlehack.random_color(), random.randint(4,8))
# Close and exit
ignore = raw_input("hit any key to continue:")
#graphic.done()
| import turtle
import turtlehack
import random
# A function that draws an n-sided polygon
def n_sided_polygon(turtle, n, color="#FFFFFF", line_thickness=1):
'''
Draw an n-sided polygon
input: turtle, n, line_length
'''
# for n times:
# Draw a line, then turn 360/n degrees and draw another
# set initial parameters
turtle.degrees()
line_length=80
turtle.pensize(line_thickness)
turn_angle = (360/n)
i = 1
# Draw each line segment and turn
while (i <= n):
turtle.color(color)
turtle.pendown()
turtle.forward(line_length)
turtle.penup()
turtle.right(turn_angle)
i += 1
return 0
## MAIN ##
# set initial parameters
n=random.randint(3,12)
# create the Turle instance
graphic = turtle.Turtle()
turtlehack.n_sided_polygon(graphic, n, turtlehack.random_color(), random.randint(4,8))
ignore = input("hit any key to continue:")
graphic.done()
| mit | Python |
458d61ffb5161394f8080cea59716b2f9cb492f3 | Add error message for not implemented error | pbutenee/ml-tutorial,pbutenee/ml-tutorial | nbgrader_config.py | nbgrader_config.py | c = get_config()
c.CourseDirectory.db_assignments = [dict(name="1", duedate="2019-12-09 17:00:00 UTC")]
c.CourseDirectory.db_students = [
dict(id="foo", first_name="foo", last_name="foo")
]
c.ClearSolutions.code_stub = {'python': '''##### Implement this part of the code #####
raise NotImplementedError("Code not implemented, follow the instructions.")'''}
| c = get_config()
c.CourseDirectory.db_assignments = [dict(name="1", duedate="2019-12-09 17:00:00 UTC")]
c.CourseDirectory.db_students = [
dict(id="foo", first_name="foo", last_name="foo")
]
c.ClearSolutions.code_stub = {'python': '##### Implement this part of the code #####\nraise NotImplementedError()'}
| mit | Python |
1cae5cf5b2874eb2bafc9486d4873abfa1a58366 | Add log_to_file method | tool-labs/ToolsWeb | toolsweb/__init__.py | toolsweb/__init__.py | # -*- coding: utf-8 -*-
import flask
import jinja2
import logging
import os.path
import oursql
def connect_to_database(database, host):
default_file = os.path.expanduser('~/replica.my.cnf')
if not os.path.isfile(default_file):
raise Exception('Database access not configured for this account!')
return oursql.connect(host=host, db=database,
read_default_file=default_file)
def connect_to_labsdb(project):
return connect_to_database(database=project + '_p',
host=project + '.labsdb')
def create_app(name, template_package=None, template_path=None,
log_file=None):
app = flask.Flask(name)
app_loader = app.jinja_loader
if template_package is not None:
app_loader = jinja2.PackageLoader(template_package)
elif template_path is not None:
app_loader = jinja2.FileSystemLoader(template_path)
app.jinja_loader = jinja2.ChoiceLoader([
app_loader,
jinja2.PackageLoader('toolsweb'),
])
return app
def log_to_file(app, log_file):
handler = logging.FileHandler(log_file)
app.logger.setLevel(logging.DEBUG)
app.logger.addHandler(handler)
| # -*- coding: utf-8 -*-
import flask
import jinja2
import os.path
import oursql
def connect_to_database(database, host):
default_file = os.path.expanduser('~/replica.my.cnf')
if not os.path.isfile(default_file):
raise Exception('Database access not configured for this account!')
return oursql.connect(host=host, db=database,
read_default_file=default_file)
def connect_to_labsdb(project):
return connect_to_database(database=project + '_p',
host=project + '.labsdb')
def create_app(name, template_package=None, template_path=None):
app = flask.Flask(name)
app_loader = app.jinja_loader
if template_package is not None:
app_loader = jinja2.PackageLoader(template_package)
elif template_path is not None:
app_loader = jinja2.FileSystemLoader(template_path)
app.jinja_loader = jinja2.ChoiceLoader([
app_loader,
jinja2.PackageLoader('toolsweb'),
])
return app
| mit | Python |
9ff4fbcdf5b21d263e8b20abb0a3d0395ce28981 | Document the reason for accepting only `POST` requests on `/wiki_render`, and allow `GET` requests from `TRAC_ADMIN` for testing purposes. | pkdevbox/trac,pkdevbox/trac,pkdevbox/trac,pkdevbox/trac | trac/wiki/web_api.py | trac/wiki/web_api.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
from trac.core import *
from trac.mimeview.api import Context
from trac.resource import Resource
from trac.web.api import IRequestHandler
from trac.wiki.formatter import format_to
class WikiRenderer(Component):
"""Wiki text renderer."""
implements(IRequestHandler)
# IRequestHandler methods
def match_request(self, req):
return req.path_info == '/wiki_render'
def process_request(self, req):
# Allow all POST requests (with a valid __FORM_TOKEN, ensuring that
# the client has at least some permission). Additionally, allow GET
# requests from TRAC_ADMIN for testing purposes.
if req.method != 'POST':
req.perm.require('TRAC_ADMIN')
realm = req.args.get('realm', 'wiki')
id = req.args.get('id')
version = req.args.get('version')
if version is not None:
try:
version = int(version)
except ValueError:
version = None
text = req.args.get('text', '')
flavor = req.args.get('flavor')
options = {}
if 'escape_newlines' in req.args:
options['escape_newlines'] = bool(req.args['escape_newlines'])
if 'shorten' in req.args:
options['shorten'] = bool(req.args['shorten'])
resource = Resource(realm, id=id, version=version)
context = Context.from_request(req, resource)
rendered = format_to(self.env, flavor, context, text, **options)
req.send(rendered.encode('utf-8'))
| # -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
from trac.core import *
from trac.mimeview.api import Context
from trac.resource import Resource
from trac.web.api import IRequestHandler
from trac.wiki.formatter import format_to
class WikiRenderer(Component):
"""Wiki text renderer."""
implements(IRequestHandler)
# IRequestHandler methods
def match_request(self, req):
return req.path_info == '/wiki_render' and req.method == 'POST'
def process_request(self, req):
realm = req.args.get('realm', 'wiki')
id = req.args.get('id')
version = req.args.get('version')
if version is not None:
try:
version = int(version)
except ValueError:
version = None
text = req.args.get('text', '')
flavor = req.args.get('flavor')
options = {}
if 'escape_newlines' in req.args:
options['escape_newlines'] = bool(req.args['escape_newlines'])
if 'shorten' in req.args:
options['shorten'] = bool(req.args['shorten'])
resource = Resource(realm, id=id, version=version)
context = Context.from_request(req, resource)
rendered = format_to(self.env, flavor, context, text, **options)
req.send(rendered.encode('utf-8'))
| bsd-3-clause | Python |
bb696f7c5b97563339f04206e649b54759fc9c6b | add transform for in__id to base get method | lampwins/stackstorm-netbox | actions/lib/action.py | actions/lib/action.py |
from st2actions.runners.pythonrunner import Action
import requests
__all__ = [
'NetboxBaseAction'
]
class NetboxBaseAction(Action):
"""Base Action for all Netbox API based actions
"""
def __init__(self, config):
super(NetboxBaseAction, self).__init__(config)
def get(self, endpoint_uri, **kwargs):
"""Make a get request to the API URI passed in
"""
self.logger.info("Calling base get with kwargs: {}".format(kwargs))
if self.config['use_https']:
url = 'https://'
else:
url = 'http://'
url = url + self.config['hostname'] + endpoint_uri
headers = {
'Authorization': 'Token ' + self.config['api_token'],
'Accept': 'application/json'
}
# transform `in__id` if present
if kwargs.get('in__id'):
kwargs['in__id'] = ','.join(kwargs['in__id'])
r = requests.get(url, verify=self.config['ssl_verify'], headers=headers, params=kwargs)
return {'raw': r.json()}
|
from st2actions.runners.pythonrunner import Action
import requests
__all__ = [
'NetboxBaseAction'
]
class NetboxBaseAction(Action):
"""Base Action for all Netbox API based actions
"""
def __init__(self, config):
super(NetboxBaseAction, self).__init__(config)
def get(self, endpoint_uri, **kwargs):
"""Make a get request to the API URI passed in
"""
self.logger.info("Calling base get with kwargs: {}".format(kwargs))
if self.config['use_https']:
url = 'https://'
else:
url = 'http://'
url = url + self.config['hostname'] + endpoint_uri
headers = {
'Authorization': 'Token ' + self.config['api_token'],
'Accept': 'application/json'
}
r = requests.get(url, verify=self.config['ssl_verify'], headers=headers, params=kwargs)
return {'raw': r.json()}
| mit | Python |
e1074fbc814b238a8d6d878810a8ac665a169f03 | Fix template name in views | Nomadblue/django-nomad-country-blogs | nomadblog/views.py | nomadblog/views.py | from django.views.generic import ListView, DetailView
from django.shortcuts import get_object_or_404
from django.conf import settings
from nomadblog.models import Blog, Category
from nomadblog import get_post_model
DEFAULT_STATUS = getattr(settings, 'PUBLIC_STATUS', 0)
POST_MODEL = get_post_model()
class NomadBlogMixin(object):
def dispatch(self, request, *args, **kwargs):
if self.kwargs.get('country_code'):
self.blog = get_object_or_404(Blog, countries__code__iexact=self.kwargs.get('country_code'), slug=self.kwargs.get('blog_slug'))
else:
self.blog = Blog.objects.get(slug=settings.DEFAULT_BLOG_SLUG)
return super(NomadBlogMixin, self).dispatch(request, *args, **kwargs)
def get_context_data(self, *args, **kwargs):
context = super(NomadBlogMixin, self).get_context_data(*args, **kwargs)
context['blog'] = self.blog
return context
class PostList(NomadBlogMixin, ListView):
model = POST_MODEL
template_name = 'nomadblog/post_list.html'
paginate_by = getattr(settings, 'POST_PAGINATE_BY', 25)
def get_queryset(self):
qs = super(PostList, self).get_queryset()
return qs.filter(bloguser__blog=self.blog).order_by('-pub_date')
class PostDetail(NomadBlogMixin, DetailView):
model = POST_MODEL
template_name = 'nomadblog/post_detail.html'
def get_object(self, queryset=None):
queryset = self.get_queryset().filter(bloguser__blog=self.blog)
return super(PostDetail, self).get_object(queryset)
class CategoriesList(NomadBlogMixin, ListView):
model = Category
paginate_by = getattr(settings, 'CATEGORY_PAGINATE_BY', 25)
class PostsByCategoryList(NomadBlogMixin, ListView):
model = POST_MODEL
template_name = 'nomadblog/post_list_by_category.html'
paginate_by = getattr(settings, 'POST_PAGINATE_BY', 25)
def get_queryset(self, *args, **kwargs):
qs = super(PostsByCategoryList, self).get_queryset()
self.category = get_object_or_404(Category, slug=self.kwargs.get('category_slug', ''))
return qs.filter(categories=self.category)
def get_context_data(self, *args, **kwargs):
context = super(PostsByCategoryList, self).get_context_data(*args, **kwargs)
context['category'] = self.category
return context
| from django.views.generic import ListView, DetailView
from django.shortcuts import get_object_or_404
from django.conf import settings
from nomadblog.models import Blog, Category
from nomadblog import get_post_model
DEFAULT_STATUS = getattr(settings, 'PUBLIC_STATUS', 0)
POST_MODEL = get_post_model()
class NomadBlogMixin(object):
def dispatch(self, request, *args, **kwargs):
if self.kwargs.get('country_code'):
self.blog = get_object_or_404(Blog, countries__code__iexact=self.kwargs.get('country_code'), slug=self.kwargs.get('blog_slug'))
else:
self.blog = Blog.objects.get(slug=settings.DEFAULT_BLOG_SLUG)
return super(NomadBlogMixin, self).dispatch(request, *args, **kwargs)
def get_context_data(self, *args, **kwargs):
context = super(NomadBlogMixin, self).get_context_data(*args, **kwargs)
context['blog'] = self.blog
return context
class PostList(NomadBlogMixin, ListView):
model = POST_MODEL
paginate_by = getattr(settings, 'POST_PAGINATE_BY', 25)
def get_queryset(self):
qs = super(PostList, self).get_queryset()
return qs.filter(bloguser__blog=self.blog).order_by('-pub_date')
class PostDetail(NomadBlogMixin, DetailView):
model = POST_MODEL
def get_object(self, queryset=None):
queryset = self.get_queryset().filter(bloguser__blog=self.blog)
return super(PostDetail, self).get_object(queryset)
class CategoriesList(NomadBlogMixin, ListView):
model = Category
paginate_by = getattr(settings, 'CATEGORY_PAGINATE_BY', 25)
class PostsByCategoryList(NomadBlogMixin, ListView):
model = POST_MODEL
template_name = 'nomadblog/post_list_by_category.html'
paginate_by = getattr(settings, 'POST_PAGINATE_BY', 25)
def get_queryset(self, *args, **kwargs):
qs = super(PostsByCategoryList, self).get_queryset()
self.category = get_object_or_404(Category, slug=self.kwargs.get('category_slug', ''))
return qs.filter(categories=self.category)
def get_context_data(self, *args, **kwargs):
context = super(PostsByCategoryList, self).get_context_data(*args, **kwargs)
context['category'] = self.category
return context
| bsd-3-clause | Python |
44161337282d14a48bde278b6e1669e8b3c94e4e | Bump version to 0.1.7 | v1k45/django-notify-x,v1k45/django-notify-x,v1k45/django-notify-x | notify/__init__.py | notify/__init__.py | __version__ = "0.1.7"
| __version__ = "0.1.6"
| mit | Python |
72a827b8cca6dc100e7f0d2d92e0c69aa67ec956 | change name and docstring | schocco/mds-web,schocco/mds-web | apps/auth/iufOAuth.py | apps/auth/iufOAuth.py | from social.backends.oauth import BaseOAuth2
# see http://psa.matiasaguirre.net/docs/backends/implementation.html
class IUFOAuth2(BaseOAuth2):
"""IUF OAuth authentication backend"""
name = 'iuf'
AUTHORIZATION_URL = 'https://iufinc.org/login/oauth/authorize'
ACCESS_TOKEN_URL = 'https://iufinc.org/login/oauth/access_token'
SCOPE_SEPARATOR = ','
EXTRA_DATA = [
('id', 'id'),
('expires', 'expires')
]
def get_user_details(self, response):
"""Returns user details from IUF account"""
return {'username': response.get('user'),
'email': response.get('email') or '',
'first_name': response.get('first_name')} | from social.backends.oauth import BaseOAuth2
# see http://psa.matiasaguirre.net/docs/backends/implementation.html
class IUFOAuth2(BaseOAuth2):
"""Github OAuth authentication backend"""
name = 'github'
AUTHORIZATION_URL = 'https://iufinc.org/login/oauth/authorize'
ACCESS_TOKEN_URL = 'https://iufinc.org/login/oauth/access_token'
SCOPE_SEPARATOR = ','
EXTRA_DATA = [
('id', 'id'),
('expires', 'expires')
]
def get_user_details(self, response):
"""Returns user details from IUF account"""
return {'username': response.get('user'),
'email': response.get('email') or '',
'first_name': response.get('first_name')} | mit | Python |
140f96ab4cddebd465ad2fdcca4560c683ca5770 | add django-markdown url for tutorials app | openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform | oeplatform/urls.py | oeplatform/urls.py | """oeplatform URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from oeplatform import settings
# This is used for Markdown forms in the tutorials app
from markdownx import urls as markdownx
handler500 = "base.views.handler500"
handler404 = "base.views.handler404"
urlpatterns = [
# This is used for Markdown forms in the tutorials app
url(r'^markdownx/', include(markdownx)),
url(r"^api/", include("api.urls")),
url(r"^", include("base.urls")),
url(r"^user/", include("login.urls")),
url(r"^factsheets/", include("modelview.urls")),
url(r"^dataedit/", include("dataedit.urls")),
url(r"^literature/", include("literature.urls")),
url(r"^ontology/", include("ontology.urls")),
url(r"^captcha/", include("captcha.urls")),
url(r"^tutorials/", include("tutorials.urls")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| """oeplatform URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from oeplatform import settings
handler500 = "base.views.handler500"
handler404 = "base.views.handler404"
urlpatterns = [
url(r"^api/", include("api.urls")),
url(r"^", include("base.urls")),
url(r"^user/", include("login.urls")),
url(r"^factsheets/", include("modelview.urls")),
url(r"^dataedit/", include("dataedit.urls")),
url(r"^literature/", include("literature.urls")),
url(r"^ontology/", include("ontology.urls")),
url(r"^captcha/", include("captcha.urls")),
url(r"^tutorials/", include("tutorials.urls")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| agpl-3.0 | Python |
7c77a7b14432a85447ff74e7aa017ca56c86e662 | Make api-tokens view exempt from CSRF checks | mikkokeskinen/tunnistamo,mikkokeskinen/tunnistamo | oidc_apis/views.py | oidc_apis/views.py | from django.http import JsonResponse
from django.views.decorators.http import require_http_methods
from oidc_provider.lib.utils.oauth2 import protected_resource_view
from django.views.decorators.csrf import csrf_exempt
from .api_tokens import get_api_tokens_by_access_token
@csrf_exempt
@require_http_methods(['GET', 'POST'])
@protected_resource_view(['openid'])
def get_api_tokens_view(request, token, *args, **kwargs):
"""
Get the authorized API Tokens.
:type token: oidc_provider.models.Token
:rtype: JsonResponse
"""
api_tokens = get_api_tokens_by_access_token(token, request=request)
response = JsonResponse(api_tokens, status=200)
response['Access-Control-Allow-Origin'] = '*'
response['Cache-Control'] = 'no-store'
response['Pragma'] = 'no-cache'
return response
| from django.http import JsonResponse
from django.views.decorators.http import require_http_methods
from oidc_provider.lib.utils.oauth2 import protected_resource_view
from .api_tokens import get_api_tokens_by_access_token
@require_http_methods(['GET', 'POST'])
@protected_resource_view(['openid'])
def get_api_tokens_view(request, token, *args, **kwargs):
"""
Get the authorized API Tokens.
:type token: oidc_provider.models.Token
:rtype: JsonResponse
"""
api_tokens = get_api_tokens_by_access_token(token, request=request)
response = JsonResponse(api_tokens, status=200)
response['Access-Control-Allow-Origin'] = '*'
response['Cache-Control'] = 'no-store'
response['Pragma'] = 'no-cache'
return response
| mit | Python |
7e9dd7469f88d676959141534809b0bc10fc9a66 | Print newline on de-initialization. | pfalcon/picotui | picotui/context.py | picotui/context.py | from .screen import Screen
class Context:
def __init__(self, cls=True, mouse=True):
self.cls = cls
self.mouse = mouse
def __enter__(self):
Screen.init_tty()
if self.mouse:
Screen.enable_mouse()
if self.cls:
Screen.cls()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self.mouse:
Screen.disable_mouse()
Screen.goto(0, 50)
Screen.cursor(True)
Screen.deinit_tty()
# This makes sure that entire screenful is scrolled up, and
# any further output happens on a normal terminal line.
print()
| from .screen import Screen
class Context:
def __init__(self, cls=True, mouse=True):
self.cls = cls
self.mouse = mouse
def __enter__(self):
Screen.init_tty()
if self.mouse:
Screen.enable_mouse()
if self.cls:
Screen.cls()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self.mouse:
Screen.disable_mouse()
Screen.goto(0, 50)
Screen.cursor(True)
Screen.deinit_tty()
| mit | Python |
4aeee052bdb2e1045d72401ea9f2595e62c6f510 | Hide stdout in text client | MatthewScholefield/mycroft-simple,MatthewScholefield/mycroft-simple | mycroft/interfaces/text_interface.py | mycroft/interfaces/text_interface.py | # Copyright (c) 2017 Mycroft AI, Inc.
#
# This file is part of Mycroft Light
# (see https://github.com/MatthewScholefield/mycroft-light).
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
from io import StringIO
from threading import Event
from typing import Callable
from mycroft.interfaces.interface_plugin import InterfacePlugin
from mycroft.util import log
class StreamHandler(StringIO):
def __init__(self, handler: Callable):
super().__init__()
self.buffer = ''
self.handler = handler
def flush(self):
self.buffer = self.buffer.strip()
if self.buffer:
self.handler(self.buffer)
self.buffer = ''
def write(self, text):
self.buffer += text
if '\n' in text:
self.flush()
class TextInterface(InterfacePlugin):
"""Interact with Mycroft via a terminal"""
_config = {'prompt': 'Input: '}
def __init__(self, rt):
super().__init__(rt)
sys.stdout = StreamHandler(log.debug)
sys.stderr = StreamHandler(log.warning)
self.response_event = Event()
self.response_event.set()
self.prompt = self.config['prompt']
def owns_response(self):
return not self.response_event.is_set()
def run(self):
self.print(self.prompt, end='')
try:
while self.rt.main_thread:
query = input()
self.response_event.clear()
self.send_query(query)
self.response_event.wait()
except (EOFError, KeyboardInterrupt):
self.rt.main_thread.quit()
def on_query(self, query):
if query and not self.owns_response():
self.print(query)
def print(self, *args, **kwargs):
print(*args, file=sys.__stdout__, flush=True, **kwargs)
def on_response(self, package):
if not self.owns_response():
self.print()
self.print()
self.print(" " + package.text)
self.print()
self.print(self.prompt, end='')
self.response_event.set()
def on_exit(self):
self.print()
| # Copyright (c) 2017 Mycroft AI, Inc.
#
# This file is part of Mycroft Light
# (see https://github.com/MatthewScholefield/mycroft-light).
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from threading import Event
from mycroft.interfaces.interface_plugin import InterfacePlugin
class TextInterface(InterfacePlugin):
"""Interact with Mycroft via a terminal"""
_config = {'prompt': 'Input: '}
def __init__(self, rt):
super().__init__(rt)
self.response_event = Event()
self.response_event.set()
self.prompt = self.config['prompt']
def owns_response(self):
return not self.response_event.is_set()
def run(self):
print(self.prompt, end='')
try:
while self.rt.main_thread:
query = input()
self.response_event.clear()
self.send_query(query)
self.response_event.wait()
except (EOFError, KeyboardInterrupt):
self.rt.main_thread.quit()
def on_query(self, query):
if query and not self.owns_response():
print(query)
def on_response(self, package):
if not self.owns_response():
print()
print()
print(" " + package.text)
print()
print(self.prompt, end='')
self.response_event.set()
def on_exit(self):
print()
| apache-2.0 | Python |
451c821118eff98d7e92b3a3f46b1a76048abbb5 | add wiki canned response | hzsweers/androiddev_bot | androiddev_bot/config.py | androiddev_bot/config.py | import praw
# Put your vars here
suspect_title_strings = ['?', 'help', 'stuck', 'why', 'my', 'feedback']
subreddit = 'androiddev'
# Canned responses
cans = {
'questions_thread': "Removed because, per sub rules, this doesn't merit its own post. We have a questions thread every day, please use it for questions like this.",
'rules': 'Removed because posts like this are against the sub rules.',
'wiki': "Removed because relevant information can be found in the /r/androiddev [wiki](https://www.reddit.com/r/androiddev/wiki/index)"
}
# Specify the keyword and what days they should be removed
weekly_threads = {
'anything': {
'day': 'Saturday',
'name': 'Weekly \"anything goes\"'
},
'hiring': {
'day': 'Monday',
'name': 'Weekly \"who\'s hiring?\"'
}
}
flair_mapping = {
'Library': 'library',
'Discussion': 'discussion',
'News': 'news',
'Tech Talk': 'talk',
}
def post_is_suspicious(post_to_check: praw.objects.Submission) -> bool:
"""
A function that can be passed a submission to check against and return whether or not it's "suspicious" or otherwise
deserving of closer attention.
:type post_to_check: praw.objects.Submission
:rtype : bool
:param post_to_check: The Submission instance to check
:return: True if suspicious, False if now
"""
return \
any(word in post_to_check.title.lower() for word in suspect_title_strings) \
or post_to_check.domain == 'stackoverflow.com' \
or (post_to_check.selftext and 'stackoverflow' in post_to_check.selftext.lower()) \
or (post_to_check.selftext_html and any(block in post_to_check.selftext_html for block in ['<code', '%3Ccode']))
| import praw
# Put your vars here
suspect_title_strings = ['?', 'help', 'stuck', 'why', 'my', 'feedback']
subreddit = 'androiddev'
# Canned responses
cans = {
'questions_thread': "Removed because, per sub rules, this doesn't merit its own post. We have a questions thread every day, please use it for questions like this.",
'rules': 'Removed because posts like this are against the sub rules.'
}
# Specify the keyword and what days they should be removed
weekly_threads = {
'anything': {
'day': 'Saturday',
'name': 'Weekly \"anything goes\"'
},
'hiring': {
'day': 'Monday',
'name': 'Weekly \"who\'s hiring?\"'
}
}
flair_mapping = {
'Library': 'library',
'Discussion': 'discussion',
'News': 'news',
'Tech Talk': 'talk',
}
def post_is_suspicious(post_to_check: praw.objects.Submission) -> bool:
"""
A function that can be passed a submission to check against and return whether or not it's "suspicious" or otherwise
deserving of closer attention.
:type post_to_check: praw.objects.Submission
:rtype : bool
:param post_to_check: The Submission instance to check
:return: True if suspicious, False if now
"""
return \
any(word in post_to_check.title.lower() for word in suspect_title_strings) \
or post_to_check.domain == 'stackoverflow.com' \
or (post_to_check.selftext and 'stackoverflow' in post_to_check.selftext.lower()) \
or (post_to_check.selftext_html and any(block in post_to_check.selftext_html for block in ['<code', '%3Ccode'])) | apache-2.0 | Python |
db2f6f4c2a70875aade3741fb57d0bc1b109ce3c | Add regexp to create_user form logic | kylemh/UO_CIS322,kylemh/UO_CIS322,kylemh/UO_CIS322 | app/views/create_user.py | app/views/create_user.py | from flask import request, flash, render_template
import bcrypt
from app import app, helpers
@app.route('/create_user', methods=['GET', 'POST'])
def create_user():
if request.method == 'POST':
username = request.form.get('username', None).strip() # Aa09_.- allowed
password = request.form.get('password', None)
role = request.form.get('role', 'Guest')
if re.match(r'^[\w.-]+$', username) and password:
# Form was completed with valid input
matching_user = "SELECT user_pk FROM users WHERE username = %s;"
user_does_exist = helpers.duplicate_check(matching_user, [username])
if user_does_exist:
flash('Username already exists')
else:
salt = bcrypt.gensalt(12)
password = bcrypt.hashpw(password.encode('utf-8'), bytes(salt))
new_user = ("INSERT INTO users (username, password, salt, role_fk) "
"VALUES (%s, %s, %s, %s);")
helpers.db_change(new_user, [username, password, salt, role])
flash('Your account was created!')
else:
flash('Please enter a username and password.')
return render_template('create_user.html')
| from flask import request, flash, render_template
import bcrypt
from app import app, helpers
@app.route('/create_user', methods=['GET', 'POST'])
def create_user():
if request.method == 'POST':
username = request.form.get('username', None).strip()
password = request.form.get('password', None)
role = request.form.get('role', 'Guest')
if not username or username == '' or not password or password == '':
flash('Please enter a username and password.')
else:
# Form was completed
matching_user = "SELECT user_pk FROM users WHERE username = %s;"
user_does_exist = helpers.duplicate_check(matching_user, [username])
if user_does_exist:
flash('Username already exists')
else:
salt = bcrypt.gensalt(12)
password = bcrypt.hashpw(password.encode('utf-8'), bytes(salt))
new_user = ("INSERT INTO users (username, password, salt, role_fk) "
"VALUES (%s, %s, %s, %s);")
helpers.db_change(new_user, [username, password, salt, role])
flash('Your account was created!')
return render_template('create_user.html')
| agpl-3.0 | Python |
8bcc09e4d3d0a14abd132e023bb4b4896aaac4f2 | make imports Python 3 friendly | nhmc/Barak | barak/absorb/__init__.py | barak/absorb/__init__.py | from .absorb import *
from .equiv_width import *
from .aod import *
| from absorb import *
from equiv_width import *
from aod import *
| bsd-3-clause | Python |
4b6bffdb048aa44b42cb80a54fca9a204ede833f | Update version to 0.0.3 | FindHotel/boto3facade,InnovativeTravel/boto3facade | boto3facade/metadata.py | boto3facade/metadata.py | # -*- coding: utf-8 -*-
"""Project metadata
Information describing the project.
"""
# The package name, which is also the "UNIX name" for the project.
package = 'boto3facade'
project = "boto3facade"
project_no_spaces = project.replace(' ', '')
version = '0.0.3'
description = 'A simple facade for boto3'
authors = ['German Gomez-Herrero', 'Innovative Travel Ltd']
authors_string = ', '.join(authors)
emails = ['german@innovativetravel.eu']
license = 'MIT'
copyright = '2015 ' + authors_string
url = 'http://github.com/InnovativeTravel/boto3facade'
| # -*- coding: utf-8 -*-
"""Project metadata
Information describing the project.
"""
# The package name, which is also the "UNIX name" for the project.
package = 'boto3facade'
project = "boto3facade"
project_no_spaces = project.replace(' ', '')
version = '0.0.2'
description = 'A simple facade for boto3'
authors = ['German Gomez-Herrero', 'Innovative Travel Ltd']
authors_string = ', '.join(authors)
emails = ['german@innovativetravel.eu']
license = 'MIT'
copyright = '2015 ' + authors_string
url = 'http://github.com/InnovativeTravel/boto3facade'
| mit | Python |
81df185279a8d46ca2e8ed9fbed4c3204522965e | Extend potential life of social media queue entries | nfletton/bvspca,nfletton/bvspca,nfletton/bvspca,nfletton/bvspca | bvspca/social/models.py | bvspca/social/models.py | import logging
from datetime import datetime, timedelta
from django.db import models
from wagtail.core.models import Page
logger = logging.getLogger('bvspca.social')
class SocialMediaPostable():
def social_media_ready_to_post(self):
raise NotImplemented()
def social_media_post_text(self):
raise NotImplemented()
def social_media_post_image(self):
raise NotImplemented()
class Meta:
abstract = True
class SocialMediaQueueManager(models.Manager):
def delete_old_entries(self):
"""
Delete all entries from queue older than 14 days
:return:
"""
count, counts_by_object_type = self.filter(date__lt=datetime.now() - timedelta(14)).delete()
if count > 0:
for object_type, object_count in counts_by_object_type.items():
logger.info('Deleted {} objects of type {}'.format(object_count, object_type))
def next_postable_entry(self):
"""
Get the next queued entry that is ready to post
:return:
"""
entries = self.order_by('+priority', '+date')
for entry in entries:
if entry.page.ready_to_post():
return entry
class SocialMediaQueue(models.Model):
"""
A queue of potential pages to post to social media
"""
PRIORITIES = ((1, 1), (2, 2), (3, 3), (4, 4), (5, 5))
date = models.DateTimeField(verbose_name='timestamp', auto_now_add=True)
priority = models.PositiveSmallIntegerField(choices=PRIORITIES)
page = models.OneToOneField(
Page,
on_delete=models.DO_NOTHING,
related_name='+',
)
objects = SocialMediaQueueManager()
class Meta:
pass
def ready(self):
return self.page.specific.social_media_ready_to_post()
def __str__(self):
return self.page.title
| import logging
from datetime import datetime, timedelta
from django.db import models
from wagtail.core.models import Page
logger = logging.getLogger('bvspca.social')
class SocialMediaPostable():
def social_media_ready_to_post(self):
raise NotImplemented()
def social_media_post_text(self):
raise NotImplemented()
def social_media_post_image(self):
raise NotImplemented()
class Meta:
abstract = True
class SocialMediaQueueManager(models.Manager):
def delete_old_entries(self):
"""
Delete all entries from queue older than 7 days
:return:
"""
count, counts_by_object_type = self.filter(date__lt=datetime.now() - timedelta(7)).delete()
if count > 0:
for object_type, object_count in counts_by_object_type.items():
logger.info('Deleted {} objects of type {}'.format(object_count, object_type))
def next_postable_entry(self):
"""
Get the next queued entry that is ready to post
:return:
"""
entries = self.order_by('+priority', '+date')
for entry in entries:
if entry.page.ready_to_post():
return entry
class SocialMediaQueue(models.Model):
"""
A queue of potential pages to post to social media
"""
PRIORITIES = ((1, 1), (2, 2), (3, 3), (4, 4), (5, 5))
date = models.DateTimeField(verbose_name='timestamp', auto_now_add=True)
priority = models.PositiveSmallIntegerField(choices=PRIORITIES)
page = models.OneToOneField(
Page,
on_delete=models.DO_NOTHING,
related_name='+',
)
objects = SocialMediaQueueManager()
class Meta:
pass
def ready(self):
return self.page.specific.social_media_ready_to_post()
def __str__(self):
return self.page.title
| mit | Python |
77f4b5b1bc3c30fb454212d3c4d2aa62d8c06ca8 | Update exportyaml.py | ebroecker/canmatrix,altendky/canmatrix,altendky/canmatrix,ebroecker/canmatrix | canmatrix/exportyaml.py | canmatrix/exportyaml.py | #!/usr/bin/env python
from __future__ import absolute_import
from .canmatrix import *
import codecs
import yaml
from yaml.representer import SafeRepresenter
from builtins import *
import copy
#Copyright (c) 2013, Eduard Broecker
#All rights reserved.
#
#Redistribution and use in source and binary forms, with or without modification, are permitted provided that
# the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this list of conditions and the
# following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
#WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
#PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
#DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
#PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
#CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
#OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
#DAMAGE.
#
# this script exports yaml-files from a canmatrix-object
# yaml-files are just object-dumps human readable.
# This export is complete, no information lost
representers = False
try:
yaml.add_representer(int, SafeRepresenter.represent_int)
yaml.add_representer(long, SafeRepresenter.represent_long)
yaml.add_representer(unicode, SafeRepresenter.represent_unicode)
yaml.add_representer(str, SafeRepresenter.represent_unicode)
yaml.add_representer(list, SafeRepresenter.represent_list)
representers = True
except:
representers = False
# some error with representers ... continue anyway
def exportYaml(db, filename, **options):
newdb = copy.deepcopy(db)
for i,frame in enumerate(newdb._fl._list):
for j,signal in enumerate(frame._signals):
if signal._is_little_endian == False:
signal._startbit = signal.getStartbit(bitNumbering = 1, startLittle = True)
newdb._fl._list[i]._signals[j]._startbit = signal._startbit
f = open(filename,"wb")
if representers:
f.write(unicode(yaml.dump(newdb)))
else:
f.write(yaml.dump(newdb))
| #!/usr/bin/env python
from __future__ import absolute_import
from .canmatrix import *
import codecs
import yaml
from yaml.representer import SafeRepresenter
from builtins import *
import copy
#Copyright (c) 2013, Eduard Broecker
#All rights reserved.
#
#Redistribution and use in source and binary forms, with or without modification, are permitted provided that
# the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this list of conditions and the
# following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
#WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
#PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
#DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
#PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
#CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
#OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
#DAMAGE.
#
# this script exports yaml-files from a canmatrix-object
# yaml-files are just object-dumps human readable.
# This export is complete, no information lost
representers = False
try:
yaml.add_representer(int, SafeRepresenter.represent_int)
yaml.add_representer(long, SafeRepresenter.represent_long)
yaml.add_representer(unicode, SafeRepresenter.represent_unicode)
yaml.add_representer(str, SafeRepresenter.represent_unicode)
yaml.add_representer(list, SafeRepresenter.represent_list)
representers = True
except:
representers = False
# some error with representers ... continue anyway
def exportYaml(db, filename, **options):
newdb = copy.deepcopy(db)
for i,frame in enumerate(newdb._fl._list):
for j,signal in enumerate(frame._signals):
if signal._is_little_endian == False:
signal._startbit = signal.getStartbit(bitNumbering = 1, startLittle = True)
newdb._fl._list[i]._signals[j]._startbit = signal._startbit
f = open(filename,"w")
if representers:
f.write(unicode(yaml.dump(newdb)))
else:
f.write(yaml.dump(newdb))
| bsd-2-clause | Python |
1a50aaf6be0f866046d88944607802a4e8661c61 | Revert "Test jenkins failure" | ihmeuw/vivarium | ceam_tests/test_util.py | ceam_tests/test_util.py | # ~/ceam/tests/test_util.py
from unittest import TestCase
from datetime import timedelta
from unittest.mock import Mock
import numpy as np
import pandas as pd
from ceam.engine import SimulationModule
from ceam.util import from_yearly, to_yearly, rate_to_probability, probability_to_rate
class TestRateConversions(TestCase):
"""
Simple regression tests for rate functions
"""
def test_from_yearly(self):
one_month = timedelta(days=30.5)
rate = 0.01
new_rate = from_yearly(rate, one_month)
self.assertAlmostEqual(new_rate, 0.0008356164383561645)
def test_to_yearly(self):
one_month = timedelta(days=30.5)
rate = 0.0008356164383561645
new_rate = to_yearly(rate, one_month)
self.assertAlmostEqual(new_rate, 0.01)
def test_rate_to_probability(self):
rate = 0.001
prob = rate_to_probability(rate)
self.assertAlmostEqual(prob, 0.00099950016662497809)
def test_probablity_to_rate(self):
prob = 0.00099950016662497809
rate = probability_to_rate(prob)
self.assertAlmostEqual(rate, 0.001)
def test_rate_to_probability_symmetry(self):
rate = 0.0001
for _ in range(100):
prob = rate_to_probability(rate)
self.assertAlmostEqual(rate, probability_to_rate(prob))
rate += (1-0.0001)/100.0
def test_rate_to_probablity_vectorizability(self):
rate = 0.001
rate = np.array([rate]*100)
prob = rate_to_probability(rate)
self.assertAlmostEqual(prob[10], 0.00099950016662497809)
self.assertAlmostEqual(np.sum(rate), np.sum(probability_to_rate(prob)))
# End.
| # ~/ceam/tests/test_util.py
from unittest import TestCase
from datetime import timedelta
from unittest.mock import Mock
import numpy as np
import pandas as pd
from ceam.engine import SimulationModule
from ceam.util import from_yearly, to_yearly, rate_to_probability, probability_to_rate
class TestRateConversions(TestCase):
"""
Simple regression tests for rate functions
"""
def test_from_yearly(self):
one_month = timedelta(days=30.5)
rate = 0.01
new_rate = from_yearly(rate, one_month)
self.assertAlmostEqual(new_rate, 0.0008356164383561645)
def test_to_yearly(self):
one_month = timedelta(days=30.5)
rate = 0.0008356164383561645
new_rate = to_yearly(rate, one_month)
self.assertAlmostEqual(new_rate, 0.01)
def test_rate_to_probability(self):
rate = 0.001
prob = rate_to_probability(rate)
self.assertAlmostEqual(prob, 0.00099950016662497809)
def test_probablity_to_rate(self):
prob = 0.00099950016662497809
rate = probability_to_rate(prob)
self.assertAlmostEqual(rate, 0.001)
def test_rate_to_probability_symmetry(self):
rate = 0.0001
for _ in range(100):
prob = rate_to_probability(rate)
self.assertAlmostEqual(rate, probability_to_rate(prob))
rate += (1-0.0001)/100.0
def test_rate_to_probablity_vectorizability(self):
rate = 0.001
rate = np.array([rate]*100)
prob = rate_to_probability(rate)
self.assertAlmostEqual(prob[10], 0.00099950016662497809)
self.assertAlmostEqual(np.sum(rate), np.sum(probability_to_rate(prob)))
def test_failure(self):
assert False
# End.
| bsd-3-clause | Python |
f1dd824978ad8581113a088afe1d1bdf99a00802 | Move to dev. | dials/dials,dials/dials,dials/dials,dials/dials,dials/dials | command_line/griddex.py | command_line/griddex.py | # LIBTBX_SET_DISPATCHER_NAME dev.dials.griddex
from __future__ import absolute_import, division, print_function
import libtbx.phil
import libtbx.load_env
help_message = '''
Cross reference indexing solutions.
Examples::
%s expts0.json refl0.json
''' % libtbx.env.dispatcher_name
phil_scope = libtbx.phil.parse("""
d_min = None
.type = float(value_min=0.0)
""")
def test_index(experiment, reflections):
from dials.algorithms.indexing import indexer
# map reflections to reciprocal space from image space
refl = indexer.indexer_base.map_spots_pixel_to_mm_rad(
reflections, experiment.detector, experiment.scan)
indexer.indexer_base.map_centroids_to_reciprocal_space(
refl, experiment.detector, experiment.beam, experiment.goniometer)
# now compute fractional indices - in Python rather than trying to push
# everything to C++ for the moment
from scitbx import matrix
ub = matrix.sqr(experiment.crystal.get_A())
rub = ub.inverse()
from dials.array_family import flex
hkl_real = flex.vec3_double(len(reflections))
for j, rlp in enumerate(reflections['rlp']):
hkl_real[j] = rub * rlp
hkl = hkl_real.iround()
ms = 0.0
for (_h, _k, _l), (_hr, _kr, _lr) in zip(hkl, hkl_real):
ms += (_hr - _h) ** 2 + (_kr - _k) ** 2 + (_lr - _l) ** 2
import math
return math.sqrt(ms / len(reflections))
def run(args):
from dials.util.options import OptionParser
from dials.util.options import flatten_experiments
from dials.util.options import flatten_reflections
import libtbx.load_env
usage = "%s [options] datablock.json reflections.pickle" % (
libtbx.env.dispatcher_name)
parser = OptionParser(
usage=usage,
phil=phil_scope,
read_experiments=True,
read_reflections=True,
check_format=False,
epilog=help_message)
params, options = parser.parse_args(show_diff_phil=True)
experiments = flatten_experiments(params.input.experiments)
reflections = flatten_reflections(params.input.reflections)
assert len(experiments) == len(reflections)
nn = len(experiments)
# FIXME check that all the crystals are in the primitive setting...
# now compute grid of reciprocal RMSD's
result = { }
for j, expt in enumerate(experiments):
for k, refl in enumerate(reflections):
result[j, k] = test_index(expt, refl)
# print matrix of results
print(' ' + ''.join(['%7d' % j for j in range(nn)]))
for k in range(nn):
record = ''.join([' %6.3f' % result[j, k] for j in range(nn)])
print('%8d' % k + record)
if __name__ == '__main__':
import sys
run(sys.argv[1:])
| from __future__ import absolute_import, division, print_function
import libtbx.phil
import libtbx.load_env
help_message = '''
Cross reference indexing solutions.
Examples::
%s expts0.json refl0.json
''' % libtbx.env.dispatcher_name
phil_scope = libtbx.phil.parse("""
d_min = None
.type = float(value_min=0.0)
""")
def test_index(experiment, reflections):
from dials.algorithms.indexing import indexer
# map reflections to reciprocal space from image space
refl = indexer.indexer_base.map_spots_pixel_to_mm_rad(
reflections, experiment.detector, experiment.scan)
indexer.indexer_base.map_centroids_to_reciprocal_space(
refl, experiment.detector, experiment.beam, experiment.goniometer)
# now compute fractional indices - in Python rather than trying to push
# everything to C++ for the moment
from scitbx import matrix
ub = matrix.sqr(experiment.crystal.get_A())
rub = ub.inverse()
from dials.array_family import flex
hkl_real = flex.vec3_double(len(reflections))
for j, rlp in enumerate(reflections['rlp']):
hkl_real[j] = rub * rlp
hkl = hkl_real.iround()
ms = 0.0
for (_h, _k, _l), (_hr, _kr, _lr) in zip(hkl, hkl_real):
ms += (_hr - _h) ** 2 + (_kr - _k) ** 2 + (_lr - _l) ** 2
import math
return math.sqrt(ms / len(reflections))
def run(args):
from dials.util.options import OptionParser
from dials.util.options import flatten_experiments
from dials.util.options import flatten_reflections
import libtbx.load_env
usage = "%s [options] datablock.json reflections.pickle" % (
libtbx.env.dispatcher_name)
parser = OptionParser(
usage=usage,
phil=phil_scope,
read_experiments=True,
read_reflections=True,
check_format=False,
epilog=help_message)
params, options = parser.parse_args(show_diff_phil=True)
experiments = flatten_experiments(params.input.experiments)
reflections = flatten_reflections(params.input.reflections)
assert len(experiments) == len(reflections)
nn = len(experiments)
# FIXME check that all the crystals are in the primitive setting...
# now compute grid of reciprocal RMSD's
result = { }
for j, expt in enumerate(experiments):
for k, refl in enumerate(reflections):
result[j, k] = test_index(expt, refl)
# print matrix of results
print(' ' + ''.join(['%7d' % j for j in range(nn)]))
for k in range(nn):
record = ''.join([' %6.3f' % result[j, k] for j in range(nn)])
print('%8d' % k + record)
if __name__ == '__main__':
import sys
run(sys.argv[1:])
| bsd-3-clause | Python |
9ebc7c3aee73f4a950d4975034f3c41417d59444 | clean up unused imports | stoivo/GitSavvy,divmain/GitSavvy,dvcrn/GitSavvy,ypersyntelykos/GitSavvy,dreki/GitSavvy,asfaltboy/GitSavvy,divmain/GitSavvy,asfaltboy/GitSavvy,dvcrn/GitSavvy,divmain/GitSavvy,dreki/GitSavvy,ddevlin/GitSavvy,ddevlin/GitSavvy,asfaltboy/GitSavvy,jmanuel1/GitSavvy,ralic/GitSavvy,ralic/GitSavvy,stoivo/GitSavvy,ddevlin/GitSavvy,jmanuel1/GitSavvy,theiviaxx/GitSavvy,theiviaxx/GitSavvy,ypersyntelykos/GitSavvy,stoivo/GitSavvy | common/util/__init__.py | common/util/__init__.py | import sublime
from plistlib import readPlistFromBytes
syntax_file_map = {}
def move_cursor(view, line_no, char_no):
# Line numbers are one-based, rows are zero-based.
line_no -= 1
# Negative line index counts backwards from the last line.
if line_no < 0:
last_line, _ = view.rowcol(view.size())
line_no = last_line + line_no + 1
pt = view.text_point(line_no, char_no)
view.sel().clear()
view.sel().add(sublime.Region(pt))
view.show(pt)
def _region_within_regions(all_outer, inner):
for outer in all_outer:
if outer.begin() <= inner.begin() and outer.end() >= inner.end():
return True
return False
def get_lines_from_regions(view, regions, valid_ranges=None):
full_line_regions = (view.full_line(region) for region in regions)
valid_regions = ([region for region in full_line_regions if _region_within_regions(valid_ranges, region)]
if valid_ranges else
full_line_regions)
return [line for region in valid_regions for line in view.substr(region).split("\n")]
def determine_syntax_files():
syntax_files = sublime.find_resources("*.tmLanguage")
for syntax_file in syntax_files:
try:
# Use `sublime.load_resource`, in case Package is `*.sublime-package`.
resource = sublime.load_resource(syntax_file)
plist = readPlistFromBytes(bytearray(resource, encoding="utf-8"))
for extension in plist["fileTypes"]:
if extension not in syntax_file_map:
syntax_file_map[extension] = []
extension_list = syntax_file_map[extension]
extension_list.append(syntax_file)
except:
continue
def get_syntax_for_file(filename):
extension = get_file_extension(filename)
try:
# Return last syntax file applicable to this extension.
return syntax_file_map[extension][-1]
except KeyError:
pass
return "Packages/Text/Plain text.tmLanguage"
def get_file_extension(filename):
period_delimited_segments = filename.split(".")
return "" if len(period_delimited_segments) < 2 else period_delimited_segments[-1]
| import itertools
import sublime
from plistlib import readPlistFromBytes
from .parse_diff import parse_diff
syntax_file_map = {}
def move_cursor(view, line_no, char_no):
# Line numbers are one-based, rows are zero-based.
line_no -= 1
# Negative line index counts backwards from the last line.
if line_no < 0:
last_line, _ = view.rowcol(view.size())
line_no = last_line + line_no + 1
pt = view.text_point(line_no, char_no)
view.sel().clear()
view.sel().add(sublime.Region(pt))
view.show(pt)
def _region_within_regions(all_outer, inner):
for outer in all_outer:
if outer.begin() <= inner.begin() and outer.end() >= inner.end():
return True
return False
def get_lines_from_regions(view, regions, valid_ranges=None):
full_line_regions = (view.full_line(region) for region in regions)
valid_regions = ([region for region in full_line_regions if _region_within_regions(valid_ranges, region)]
if valid_ranges else
full_line_regions)
return [line for region in valid_regions for line in view.substr(region).split("\n")]
def determine_syntax_files():
syntax_files = sublime.find_resources("*.tmLanguage")
for syntax_file in syntax_files:
try:
# Use `sublime.load_resource`, in case Package is `*.sublime-package`.
resource = sublime.load_resource(syntax_file)
plist = readPlistFromBytes(bytearray(resource, encoding="utf-8"))
for extension in plist["fileTypes"]:
if extension not in syntax_file_map:
syntax_file_map[extension] = []
extension_list = syntax_file_map[extension]
extension_list.append(syntax_file)
except:
continue
def get_syntax_for_file(filename):
extension = get_file_extension(filename)
try:
# Return last syntax file applicable to this extension.
return syntax_file_map[extension][-1]
except KeyError:
pass
return "Packages/Text/Plain text.tmLanguage"
def get_file_extension(filename):
period_delimited_segments = filename.split(".")
return "" if len(period_delimited_segments) < 2 else period_delimited_segments[-1]
| mit | Python |
5caf134eedc4ace933da8c2f21aacc5f5b1224ef | bump version | vmalloc/confetti | confetti/__version__.py | confetti/__version__.py | __version__ = "2.2.1"
| __version__ = "2.2.0"
| bsd-3-clause | Python |
3dae8f25cda4827397ab3812ea552ed27d37e757 | Remove contraints on dotted names | OCA/account-financial-tools,OCA/account-financial-tools | base_vat_optional_vies/models/res_partner.py | base_vat_optional_vies/models/res_partner.py | # Copyright 2015 Tecnativa - Antonio Espinosa
# Copyright 2017 Tecnativa - David Vidal
# Copyright 2019 FactorLibre - Rodrigo Bonilla
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
vies_passed = fields.Boolean(
string="VIES validation", readonly=True)
@api.model
def simple_vat_check(self, country_code, vat_number):
res = super(ResPartner, self).simple_vat_check(
country_code, vat_number,
)
partner = self.env.context.get('vat_partner')
if partner and self.vies_passed:
# Can not be sure that this VAT is signed up in VIES
partner.update({'vies_passed': False})
return res
@api.model
def vies_vat_check(self, country_code, vat_number):
partner = self.env.context.get('vat_partner')
if partner:
# If there's an exception checking VIES, the upstream method will
# call simple_vat_check and thus the flag will be removed
partner.update({'vies_passed': True})
res = super(ResPartner, self).vies_vat_check(country_code, vat_number)
if not res:
return self.simple_vat_check(country_code, vat_number)
return res
@api.constrains('vat')
def check_vat(self):
for partner in self:
partner = partner.with_context(vat_partner=partner)
super(ResPartner, partner).check_vat()
| # Copyright 2015 Tecnativa - Antonio Espinosa
# Copyright 2017 Tecnativa - David Vidal
# Copyright 2019 FactorLibre - Rodrigo Bonilla
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
vies_passed = fields.Boolean(
string="VIES validation", readonly=True)
@api.model
def simple_vat_check(self, country_code, vat_number):
res = super(ResPartner, self).simple_vat_check(
country_code, vat_number,
)
partner = self.env.context.get('vat_partner')
if partner and self.vies_passed:
# Can not be sure that this VAT is signed up in VIES
partner.update({'vies_passed': False})
return res
@api.model
def vies_vat_check(self, country_code, vat_number):
partner = self.env.context.get('vat_partner')
if partner:
# If there's an exception checking VIES, the upstream method will
# call simple_vat_check and thus the flag will be removed
partner.update({'vies_passed': True})
res = super(ResPartner, self).vies_vat_check(country_code, vat_number)
if not res:
return self.simple_vat_check(country_code, vat_number)
return res
@api.constrains('vat', 'commercial_partner.country_id')
def check_vat(self):
for partner in self:
partner = partner.with_context(vat_partner=partner)
super(ResPartner, partner).check_vat()
| agpl-3.0 | Python |
b5b31136ff716b423d78d307e107df4b8d8cfedc | Add images field on article model abstract, is many to many | jeanmask/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,opps/opps,williamroot/opps,YACOWS/opps,opps/opps,williamroot/opps,williamroot/opps,YACOWS/opps,opps/opps,opps/opps,williamroot/opps | opps/core/models/article.py | opps/core/models/article.py | # -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from opps.core.models.published import Published
from opps.core.models.date import Date
from opps.core.models.channel import Channel
from opps.core.models.image import Image
from tagging.models import Tag
from tagging.fields import TagField
class Article(Published, Date):
title = models.CharField(_(u"Title"), max_length=140)
slug = models.SlugField(_(u"URL"), max_length=150, unique=True,
db_index=True)
short_title = models.CharField(_(u"Short title"), max_length=140,
blank=False, null=True)
headline = models.TextField(_(u"Headline"), blank=True)
channel = models.ForeignKey(Channel, verbose_name=_(u"Channel"))
content = models.TextField(_(u"Content"))
images = models.ManyToManyField(Image, through='ArticleImage',
related_name='article_images')
tags = TagField(null=True, verbose_name=_(u"Tags"))
class Meta:
abstract = True
def __unicode__(self):
return "{0}/{1}".format(self.site.name, self.slug)
class Post(Article):
credit = models.CharField(_("Credit"), blank=True, max_length=255)
class Meta:
app_label = 'core'
| # -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from opps.core.models.published import Published
from opps.core.models.date import Date
from opps.core.models.channel import Channel
from tagging.models import Tag
from tagging.fields import TagField
class Article(Published, Date):
title = models.CharField(_(u"Title"), max_length=140)
slug = models.SlugField(_(u"URL"), max_length=150, unique=True,
db_index=True)
short_title = models.CharField(_(u"Short title"), max_length=140,
blank=False, null=True)
headline = models.TextField(_(u"Headline"), blank=True)
channel = models.ForeignKey(Channel, verbose_name=_(u"Channel"))
content = models.TextField(_(u"Content"))
tags = TagField(null=True, verbose_name=_(u"Tags"))
class Meta:
abstract = True
def __unicode__(self):
return "{0}/{1}".format(self.site.name, self.slug)
class Post(Article):
credit = models.CharField(_("Credit"), blank=True, max_length=255)
class Meta:
app_label = 'core'
| mit | Python |
a25141dca6ce6f8ead88c43fa7f5726afb2a9dba | Fix currency dialog to match model changes | coinbox/coinbox-mod-currency | cbpos/mod/currency/views/dialogs/currency.py | cbpos/mod/currency/views/dialogs/currency.py | from PySide import QtGui
import cbpos
logger = cbpos.get_logger(__name__)
from cbpos.mod.currency.models import Currency
from cbpos.mod.currency.views import CurrenciesPage
class CurrencyDialog(QtGui.QWidget):
def __init__(self):
super(CurrencyDialog, self).__init__()
message = cbpos.tr.currency._("Set up the currencies you will be using. You will be able to change them later also.")
self.message = QtGui.QLabel(message)
self.form = CurrenciesPage()
buttonBox = QtGui.QDialogButtonBox()
self.doneBtn = buttonBox.addButton(QtGui.QDialogButtonBox.Close)
self.doneBtn.pressed.connect(self.onDoneButton)
layout = QtGui.QVBoxLayout()
layout.setSpacing(10)
layout.addWidget(self.message)
layout.addWidget(self.form)
layout.addWidget(buttonBox)
self.setLayout(layout)
def onDoneButton(self):
session = cbpos.database.session()
currency = session.query(Currency).first()
if currency is None:
QtGui.QMessageBox.warning(self, cbpos.tr.currency._("No currency"),
cbpos.tr.currency._("You have to sest up at least one currency"),
QtGui.QMessageBox.Ok)
return
cbpos.config["mod.currency", "default"] = unicode(currency.id)
self.close()
cbpos.ui.show_default()
| from PySide import QtGui
from cbpos.mod.currency.models.currency import Currency
import cbpos
class CurrencyDialog(QtGui.QWidget):
def __init__(self):
super(CurrencyDialog, self).__init__()
self.name = QtGui.QLineEdit()
self.symbol = QtGui.QLineEdit()
self.value = QtGui.QSpinBox()
self.value.setMinimum(0)
self.value.setSingleStep(1)
self.decimalPlaces = QtGui.QSpinBox()
self.decimalPlaces.setRange(0, 10)
self.decimalPlaces.setSingleStep(1)
self.digitGrouping = QtGui.QCheckBox()
buttonBox = QtGui.QDialogButtonBox()
self.okBtn = buttonBox.addButton(QtGui.QDialogButtonBox.Ok)
self.okBtn.pressed.connect(self.onOkButton)
self.cancelBtn = buttonBox.addButton(QtGui.QDialogButtonBox.Cancel)
self.cancelBtn.pressed.connect(self.onCancelButton)
rows = [["Name", self.name],
["Symbol", self.symbol],
["Value", self.value],
["Decimal Places", self.decimalPlaces],
["Digit Grouping", self.digitGrouping],
[buttonBox]]
form = QtGui.QFormLayout()
form.setSpacing(10)
[form.addRow(*row) for row in rows]
self.setLayout(form)
def onOkButton(self):
currency = Currency(name=self.name.text(),
symbol=self.symbol.text(),
value=self.value.text(),
decimal_places=self.decimalPlaces.value(),
digit_grouping=self.digitGrouping.isChecked()
)
session = cbpos.database.session()
session.add(currency)
session.commit()
cbpos.config["mod.currency", "default"] = unicode(currency.id)
self.close()
cbpos.ui.show_default()
def onCancelButton(self):
self.close()
| mit | Python |
0dacb5382e3099d0b9faa65e207c3be407747eeb | Use .array | toslunar/chainerrl,toslunar/chainerrl | chainerrl/optimizers/nonbias_weight_decay.py | chainerrl/optimizers/nonbias_weight_decay.py | # This caused an error in py2 because cupy expect non-unicode str
# from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
from chainer import cuda
class NonbiasWeightDecay(object):
"""Weight decay only for non-bias parameters.
This hook can be used just like chainer.optimizer_hooks.WeightDecay except
that this hook does not apply weight decay to bias parameters.
This hook assumes that all the bias parameters have the name of "b". Any
parameter whose name is "b" is considered as a bias and excluded from
weight decay.
"""
name = 'NonbiasWeightDecay'
call_for_each_param = True
timing = 'pre'
def __init__(self, rate):
self.rate = rate
def __call__(self, rule, param):
if param.name == 'b':
return
p, g = param.array, param.grad
if p is None or g is None:
return
with cuda.get_device_from_array(p) as dev:
if int(dev) == -1:
g += self.rate * p
else:
kernel = cuda.elementwise(
'T p, T decay', 'T g', 'g += decay * p', 'weight_decay')
kernel(p, self.rate, g)
| # This caused an error in py2 because cupy expect non-unicode str
# from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
from chainer import cuda
class NonbiasWeightDecay(object):
"""Weight decay only for non-bias parameters.
This hook can be used just like chainer.optimizer_hooks.WeightDecay except
that this hook does not apply weight decay to bias parameters.
This hook assumes that all the bias parameters have the name of "b". Any
parameter whose name is "b" is considered as a bias and excluded from
weight decay.
"""
name = 'NonbiasWeightDecay'
call_for_each_param = True
timing = 'pre'
def __init__(self, rate):
self.rate = rate
def __call__(self, rule, param):
if param.name == 'b':
return
p, g = param.data, param.grad
if p is None or g is None:
return
with cuda.get_device_from_array(p) as dev:
if int(dev) == -1:
g += self.rate * p
else:
kernel = cuda.elementwise(
'T p, T decay', 'T g', 'g += decay * p', 'weight_decay')
kernel(p, self.rate, g)
| mit | Python |
1006ac44b8ef9654976c1b57ccf20387877db1cb | Update results/title/forms100.py | moodpulse/l2,moodpulse/l2,moodpulse/l2,moodpulse/l2,moodpulse/l2 | results/title/forms100.py | results/title/forms100.py | from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
import os.path
from laboratory.settings import FONTS_FOLDER
from directions.models import Issledovaniya
from reportlab.platypus import Paragraph, Table, TableStyle, Spacer
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import mm
from reportlab.lib.enums import TA_CENTER
def form_01(iss: Issledovaniya):
pdfmetrics.registerFont(TTFont('PTAstraSerifBold', os.path.join(FONTS_FOLDER, 'PTAstraSerif-Bold.ttf')))
styleSheet = getSampleStyleSheet()
style = styleSheet["Normal"]
style.fontName = "PTAstraSerifBold"
style.fontSize = 12
style.leading = 8
style.spaceAfter = 0 * mm
style.alignment = TA_CENTER
hospital = iss.doc_confirmation.hospital
hospital_short_title = hospital.safe_short_title
hospital_address = hospital.safe_address
hospital_ogrn = hospital.safe_ogrn
data = [
[Paragraph("Министерство здравоохранения Российской Федерации", style)],
[Paragraph(hospital_short_title, style)],
[Paragraph(hospital_address, style)],
[Paragraph(f"Код ОГРН {hospital_ogrn}", style)],
[Spacer(1, 1 * mm)],
[Paragraph("<u>ВЫПИСКА ИЗ АМБУЛАТОРНОЙ КАРТЫ</u>", style)],
]
t = Table(data, colWidths=180 * mm)
t.setStyle(
TableStyle(
[
('ALIGN', (0, 0), (-1, -1), 'CENTER'),
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]
)
)
return t
| from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
import os.path
from laboratory.settings import FONTS_FOLDER
from directions.models import Issledovaniya
from reportlab.platypus import Paragraph, Table, TableStyle, Spacer
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import mm
from reportlab.lib.enums import TA_CENTER
def form_01(iss: Issledovaniya):
pdfmetrics.registerFont(TTFont('PTAstraSerifBold', os.path.join(FONTS_FOLDER, 'PTAstraSerif-Bold.ttf')))
styleSheet = getSampleStyleSheet()
style = styleSheet["Normal"]
style.fontName = "PTAstraSerifBold"
style.fontSize = 12
style.leading = 8
style.spaceAfter = 0 * mm
style.alignment = TA_CENTER
hospital = iss.doc_confirmation.hospital
hospital_short_title = hospital.safe_short_title
hospital_address = hospital.safe_address
hospital_ogrn = hospital.safe_ogrn
data = [
[Paragraph("Министерство здравоохранения Российской Федерации", style)],
[Paragraph(hospital_short_title, style)],
[Paragraph(hospital_address, style)],
[Paragraph(f"Код ОГРН {hospital_ogrn}", style)],
[Spacer(1, 1 * mm)],
[Paragraph("<u>ВЫПИСКА ИЗ АМБУЛАТОРНОЙ КАРТЫ</u>", style)],
]
t = Table(data, colWidths= 180 * mm)
t.setStyle(
TableStyle(
[
('ALIGN', (0, 0), (-1, -1), 'CENTER'),
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]
)
)
return t
| mit | Python |
f51369999441cb85ed730488e943580d707e8856 | use relative imports in parser/__init__.py | boakley/robotframework-lint | rflint/parser/__init__.py | rflint/parser/__init__.py | from .parser import (SuiteFolder, ResourceFile, SuiteFile, RobotFactory,
Testcase, Keyword, Row, Statement, TestcaseTable, KeywordTable)
from .tables import DefaultTable, SettingTable, UnknownTable, VariableTable, MetadataTable, RobotTable
| from parser import ResourceFile, SuiteFile, RobotFileFactory, Testcase, Keyword, Row, Statement
from tables import DefaultTable, SettingTable, UnknownTable, VariableTable, MetadataTable, RobotTable
| apache-2.0 | Python |
780f28cd91f92fea0dddee2b62bc659d244a8270 | Change create sample code to select indexes by eval set | rjegankumar/instacart_prediction_model | create_sample.py | create_sample.py | # importing modules/ libraries
import pandas as pd
import random
import numpy as np
# create a sample of prior orders
orders_df = pd.read_csv("Data/orders.csv")
s = round(3214874 * 0.1)
i = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="prior"].index), s))
orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False)
# create a sample of train orders
s = round(131209 * 0.1)
j = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="train"].index), s))
orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False)
# create a sample of test orders
s = round(75000 * 0.1)
k = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="test"].index), s))
orders_df.loc[k,:].to_csv("Data/orders_test_sample.csv", index = False)
# create a sample of prior order products
order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id')
order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv", index = False)
# create a sample of train order products
order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id')
order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv", index = False)
| # importing modules/ libraries
import pandas as pd
import random
import numpy as np
# create a sample of prior orders
orders_df = pd.read_csv("Data/orders.csv")
s = round(3214874 * 0.1)
i = sorted(random.sample(range(1,3214874), s))
orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False)
# create a sample of train orders
s = round(131209 * 0.1)
j = sorted(random.sample(range(1,131209), s))
orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False)
# create a sample of test orders
s = round(75000 * 0.1)
k = sorted(random.sample(range(1,75000), s))
orders_df.loc[k,:].to_csv("Data/orders_test_sample.csv", index = False)
# create a sample of prior order products
order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id')
order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv", index = False)
# create a sample of train order products
order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id')
order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv", index = False)
| mit | Python |
fee30c4017da4d41a9487d961ba543d2d1e20e85 | Add explicit Note join relationship on NoteContent model. (also remove extraneous comments on old date format) | icasdri/tuhi-flask | tuhi_flask/models.py | tuhi_flask/models.py | # Copyright 2015 icasdri
#
# This file is part of tuhi-flask.
#
# tuhi-flask is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tuhi-flask is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with tuhi-flask. If not, see <http://www.gnu.org/licenses/>.
from flask import current_app as app
from sqlalchemy import Column, Integer, String, CHAR, Text, Boolean, DateTime, ForeignKey
from sqlalchemy.orm import relationship
from werkzeug.security import generate_password_hash, check_password_hash
from tuhi_flask.database import Base
class User(Base):
__tablename__ = 'users'
user_id = Column(Integer, primary_key=True)
username = Column(String, unique=True, index=True)
password_hash = Column(String)
def __init__(self, username, password):
self.username = username
self.set_password(password)
def set_password(self, password):
self.password_hash = generate_password_hash(password,
method=app.config['PASSWORD_HASH_METHOD'],
salt_length=app.config['PASSWORD_SALT_LENGTH'])
def check_password(self, password):
return check_password_hash(self.password_hash, password)
class Note(Base):
__tablename__ = 'notes'
note_id = Column(CHAR(36), primary_key=True)
user_id = Column(Integer, ForeignKey('users.user_id'), index=True)
title = Column(String)
deleted = Column(Boolean, default=False)
date_modified = Column(Integer, index=True) # Seconds from epoch
class NoteContent(Base):
__tablename__ = 'note_contents'
note_content_id = Column(CHAR(36), primary_key=True)
note_id = Column(CHAR(36), ForeignKey('notes.note_id'), index=True)
data = Column(Text)
date_created = Column(Integer, index=True) # Seconds from epoch
note = relationship("Note")
| # Copyright 2015 icasdri
#
# This file is part of tuhi-flask.
#
# tuhi-flask is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tuhi-flask is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with tuhi-flask. If not, see <http://www.gnu.org/licenses/>.
from flask import current_app as app
from sqlalchemy import Column, Integer, String, CHAR, Text, Boolean, DateTime, ForeignKey
from werkzeug.security import generate_password_hash, check_password_hash
from tuhi_flask.database import Base
class User(Base):
__tablename__ = 'users'
user_id = Column(Integer, primary_key=True)
username = Column(String, unique=True, index=True)
password_hash = Column(String)
def __init__(self, username, password):
self.username = username
self.set_password(password)
def set_password(self, password):
self.password_hash = generate_password_hash(password,
method=app.config['PASSWORD_HASH_METHOD'],
salt_length=app.config['PASSWORD_SALT_LENGTH'])
def check_password(self, password):
return check_password_hash(self.password_hash, password)
class Note(Base):
__tablename__ = 'notes'
note_id = Column(CHAR(36), primary_key=True)
user_id = Column(Integer, ForeignKey('users.user_id'), index=True)
title = Column(String)
deleted = Column(Boolean, default=False)
date_modified = Column(Integer, index=True) # Seconds from epoch
# date_modified = Column(DateTime) # May need to use Integer from epoch here
class NoteContent(Base):
__tablename__ = 'note_contents'
note_content_id = Column(CHAR(36), primary_key=True)
note_id = Column(CHAR(36), ForeignKey('notes.note_id'), index=True)
data = Column(Text)
date_created = Column(Integer, index=True) # Seconds from epoch
# date_created = Column(DateTime) # May need to use Integer from epoch here
| agpl-3.0 | Python |
ee76ae4f41be17a0f6a482273e99783df8212004 | Reconfigure key repeat (should change to be configurable) | haikuginger/riker | riker/worker/utils.py | riker/worker/utils.py | from logging import getLogger
import tempfile
from threading import Thread
import lirc
from django.conf import settings
from systemstate.models import RemoteButton
from systemstate.utils import push_button
LOGGER = getLogger(__name__)
LIRCRC_TEMPLATE = '''
begin
prog = {lirc_name}
button = {key_name}
config = {key_name}
repeat = 2
delay = 3
end
'''
class LircListener(Thread):
def __init__(self, lirc_name):
self.lirc_name = lirc_name
self.lircrc_filename = create_lircrc_tempfile()
super(LircListener, self).__init__()
def run(self):
lirc.init(self.lirc_name, self.lircrc_filename)
listen(self.lirc_name, self.lircrc_filename)
def listen(lirc_name, lircrc_filename, callback=None):
lirc.init(lirc_name, lircrc_filename)
callback = callback or push_button
while True:
for key_code in lirc.nextcode():
LOGGER.warning(key_code)
callback(key_code)
def create_lircrc_tempfile(lirc_name):
buttons = RemoteButton.objects.all().values_list('lirc_code', flat=True)
with tempfile.NamedTemporaryFile(delete=False) as lircrc_file:
lircrc_file.write(generate_lircrc(lirc_name, buttons).encode('ascii'))
return lircrc_file.name
def generate_lircrc(name, buttons):
return '\n'.join(
LIRCRC_TEMPLATE.format(
lirc_name=name,
key_name=button,
) for button in buttons
) | from logging import getLogger
import tempfile
from threading import Thread
import lirc
from django.conf import settings
from systemstate.models import RemoteButton
from systemstate.utils import push_button
LOGGER = getLogger(__name__)
LIRCRC_TEMPLATE = '''
begin
prog = {lirc_name}
button = {key_name}
config = {key_name}
end
'''
class LircListener(Thread):
def __init__(self, lirc_name):
self.lirc_name = lirc_name
self.lircrc_filename = create_lircrc_tempfile()
super(LircListener, self).__init__()
def run(self):
lirc.init(self.lirc_name, self.lircrc_filename)
listen(self.lirc_name, self.lircrc_filename)
def listen(lirc_name, lircrc_filename, callback=None):
lirc.init(lirc_name, lircrc_filename)
callback = callback or push_button
while True:
for key_code in lirc.nextcode():
LOGGER.warning(key_code)
callback(key_code)
def create_lircrc_tempfile(lirc_name):
buttons = RemoteButton.objects.all().values_list('lirc_code', flat=True)
with tempfile.NamedTemporaryFile(delete=False) as lircrc_file:
lircrc_file.write(generate_lircrc(lirc_name, buttons).encode('ascii'))
return lircrc_file.name
def generate_lircrc(name, buttons):
return '\n'.join(
LIRCRC_TEMPLATE.format(
lirc_name=name,
key_name=button,
) for button in buttons
) | mit | Python |
0827911184bf43a6dd50712444d3f9385a64eb31 | support combining bigrams | enkiv2/constrained-writer | constraintWriterTool.py | constraintWriterTool.py | #!/usr/bin/env python
from autosuggest import *
import os, sys
from sys import argv, exit
def printUsage():
print("Usage: constraintWriterTool action [options]\nActions:\n\tsuggest\t\tbigramfile word\n\tsuggestPfx\tbigramfile word prefix\n\tinWhitelist\tbigramfile word\n\tinBlacklist\tbigramfile word\n\tcompile\t\tcorpus bigramfile\n\tcompileMulti\tbigramfile corpus [corpus_2 ... corpus_n]\n\tcombine\t\tbigramfile_out [bigramfile_in ... ]\n")
exit(1)
if len(argv)<4:
printUsage()
world={}
if argv[1] in ["suggest", "suggestPfx", "inWhitelist", "inBlacklist"]:
def inBlacklist(world, word):
return checkWhiteList(world, word, True)
def pfx(world, word):
return bigramSuggestPfx(world, word, argv[4])
funcs={"suggest":bigramSuggest, "inWhitelist":checkWhiteList, "inBlacklist":inBlacklist, "suggestPfx":pfx}
world=loadBigrams(argv[2])
print(funcs[argv[1]](world, argv[3]))
exit(0)
elif argv[1]=="compile":
with open(argv[2], 'r') as f:
saveBigrams(corpus2bigrams(f.read()), argv[3])
elif argv[1]=="compileMulti":
corpora=[]
for fname in argv[3:]:
with open(fname, 'r') as f:
corpora.append(f.read())
saveBigrams(corpus2bigrams("\n".join(corpora)), argv[2])
elif argv[1]=="combine":
bigrams={}
for fname in argv[3:]:
world=loadBigrams(fname)
for w1 in world.keys():
if not (w1 in bigrams):
bigrams[w1]={}
for w2 in world[w1].keys():
if not w2 in bigrams[w1]:
bigrams[w1][w2]=0
bigrams[w1][w2]+=world[w1][w2]
saveBigrams(bigrams, argv[2])
| #!/usr/bin/env python
from autosuggest import *
import os, sys
from sys import argv, exit
def printUsage():
print("Usage: constraintWriterTool action [options]\nActions:\n\tsuggest\t\tbigramfile word\n\tsuggestPfx\t\tbigramfile word prefix\n\tinWhitelist\tbigramfile word\n\tinBlacklist\tbigramfile word\n\tcompile\t\tcorpus bigramfile\n\tcompileMulti\tbigramfile corpus [corpus_2 ... corpus_n]\n")
exit(1)
if len(argv)<4:
printUsage()
world={}
if argv[1] in ["suggest", "suggestPfx", "inWhitelist", "inBlacklist"]:
def inBlacklist(world, word):
return checkWhiteList(world, word, True)
def pfx(world, word):
return bigramSuggestPfx(world, word, argv[4])
funcs={"suggest":bigramSuggest, "inWhitelist":checkWhiteList, "inBlacklist":inBlacklist, "suggestPfx":pfx}
world=loadBigrams(argv[2])
print(funcs[argv[1]](world, argv[3]))
exit(0)
elif argv[1]=="compile":
with open(argv[2], 'r') as f:
saveBigrams(corpus2bigrams(f.read()), argv[3])
elif argv[1]=="compileMulti":
corpora=[]
for fname in argv[3:]:
with open(fname, 'r') as f:
corpora.append(f.read())
saveBigrams(corpus2bigrams("\n".join(corpora)), argv[2])
| bsd-3-clause | Python |
506b4e510b60d02bf7bfeb23bf181c483ec5a458 | Reduce error message size by not printing entire traceback | JacobAMason/Boa | src/Client.py | src/Client.py | #!python
__author__ = 'JacobAMason'
import sys
from twisted.words.protocols import irc
from twisted.internet import protocol, reactor
import StringIO
class Bot(irc.IRCClient):
def _get_nickname(self):
return self.factory.nickname
nickname = property(_get_nickname)
def signedOn(self):
self.join(self.factory.channel)
print "Signed on as %s." % (self.nickname)
def joined(self, channel):
print "Joined %s." % (channel)
def privmsg(self, user, channel, message):
if not message.startswith(self.nickname):
return
else:
idx = message.find(' ')
message = message[idx+1:]
# create file-like string to capture output
codeOut = StringIO.StringIO()
codeErr = StringIO.StringIO()
# capture output and errors
sys.stdout = codeOut
sys.stderr = codeErr
errorText = ""
try:
exec message
except Exception, err:
errorText = str(err)
# restore stdout and stderr
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
s = codeErr.getvalue()
if s:
self.msg(channel, "error: %s\n" % s)
if errorText:
self.msg(channel, "error: %s\n" % errorText)
s = codeOut.getvalue()
if s:
self.msg(channel, "%s" % s)
codeOut.close()
codeErr.close()
def dataReceived(self, bytes):
print str(bytes).rstrip()
# Make sure to up-call - otherwise all of the IRC logic is disabled!
return irc.IRCClient.dataReceived(self, bytes)
class BotFactory(protocol.ClientFactory):
protocol = Bot
def __init__(self, channel, nickname="Boa"):
self.channel = channel
self.nickname = nickname
def clientConnectionLost(self, connector, reason):
print "Lost connection (%s), reconnecting..." % (reason)
connector.connect()
def clientConnectionFailed(self, connector, reason):
print "Could not connect: %s" % (reason)
if __name__ == "__main__":
channel = sys.argv[1]
reactor.connectTCP("coop.test.adtran.com", 6667, BotFactory('#' + channel))
reactor.run()
| #!python
__author__ = 'JacobAMason'
import sys
from twisted.words.protocols import irc
from twisted.internet import protocol, reactor
import StringIO
import traceback
class Bot(irc.IRCClient):
def _get_nickname(self):
return self.factory.nickname
nickname = property(_get_nickname)
def signedOn(self):
self.join(self.factory.channel)
print "Signed on as %s." % (self.nickname)
def joined(self, channel):
print "Joined %s." % (channel)
def privmsg(self, user, channel, message):
if not message.startswith(self.nickname):
return
else:
idx = message.find(' ')
message = message[idx+1:]
# create file-like string to capture output
codeOut = StringIO.StringIO()
codeErr = StringIO.StringIO()
# capture output and errors
sys.stdout = codeOut
sys.stderr = codeErr
# https://stackoverflow.com/questions/3702675/how-to-print-the-full-traceback-without-halting-the-program
errorText = ""
try:
exec message
except Exception, err:
errorText = traceback.format_exc()
# restore stdout and stderr
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
s = codeErr.getvalue()
if s:
self.msg(channel, "error: %s\n" % s)
if errorText:
self.msg(channel, "error: %s\n" % errorText)
s = codeOut.getvalue()
if s:
self.msg(channel, "%s" % s)
codeOut.close()
codeErr.close()
def dataReceived(self, bytes):
print str(bytes).rstrip()
# Make sure to up-call - otherwise all of the IRC logic is disabled!
return irc.IRCClient.dataReceived(self, bytes)
class BotFactory(protocol.ClientFactory):
protocol = Bot
def __init__(self, channel, nickname="Boa"):
self.channel = channel
self.nickname = nickname
def clientConnectionLost(self, connector, reason):
print "Lost connection (%s), reconnecting..." % (reason)
connector.connect()
def clientConnectionFailed(self, connector, reason):
print "Could not connect: %s" % (reason)
if __name__ == "__main__":
channel = sys.argv[1]
reactor.connectTCP("coop.test.adtran.com", 6667, BotFactory('#' + channel))
reactor.run()
| mit | Python |
1f19fa52e40db1f28d620aa8bf75745e814c0f81 | Remove unused import | r-robles/rd-bot | cogs/fun.py | cogs/fun.py | import discord
from discord.ext import commands
from utils.messages import ColoredEmbed
class Fun:
def __init__(self, bot):
self.bot = bot
@commands.command()
async def xkcd(self, ctx):
"""See the latest XKCD comic."""
async with self.bot.session.get('https://xkcd.com/info.0.json') as r:
if r.status == 200:
json = await r.json()
embed = ColoredEmbed(title=json['title'],
description=json['alt'])
embed.set_image(url=json['img'])
await ctx.send(embed=embed)
@commands.command()
async def lenny(self, ctx):
"""( ͡° ͜ʖ ͡°)"""
await ctx.send('( ͡° ͜ʖ ͡°)')
def setup(bot):
bot.add_cog(Fun(bot))
| import random
import discord
from discord.ext import commands
from utils.messages import ColoredEmbed
class Fun:
def __init__(self, bot):
self.bot = bot
@commands.command()
async def xkcd(self, ctx):
"""See the latest XKCD comic."""
async with self.bot.session.get('https://xkcd.com/info.0.json') as r:
if r.status == 200:
json = await r.json()
embed = ColoredEmbed(title=json['title'],
description=json['alt'])
embed.set_image(url=json['img'])
await ctx.send(embed=embed)
@commands.command()
async def lenny(self, ctx):
"""( ͡° ͜ʖ ͡°)"""
await ctx.send('( ͡° ͜ʖ ͡°)')
def setup(bot):
bot.add_cog(Fun(bot))
| mit | Python |
14eaff694912320296412f2e4ca51072c5dddf49 | add unit_testing_only decorator | qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq | corehq/apps/userreports/dbaccessors.py | corehq/apps/userreports/dbaccessors.py | from corehq.apps.domain.dbaccessors import get_docs_in_domain_by_class
from corehq.apps.domain.models import Domain
from corehq.util.test_utils import unit_testing_only
def get_number_of_report_configs_by_data_source(domain, data_source_id):
"""
Return the number of report configurations that use the given data source.
"""
from corehq.apps.userreports.models import ReportConfiguration
return ReportConfiguration.view(
'userreports/report_configs_by_data_source',
reduce=True,
key=[domain, data_source_id]
).one()['value']
@unit_testing_only
def get_all_report_configs():
all_domains = Domain.get_all()
for domain in all_domains:
for report_config in get_report_configs_for_domain(domain.name):
yield report_config
def get_report_configs_for_domain(domain):
from corehq.apps.userreports.models import ReportConfiguration
return sorted(
get_docs_in_domain_by_class(domain, ReportConfiguration),
key=lambda report: report.title,
)
| from django.conf import settings
from dimagi.utils.couch.database import iter_docs
from corehq.apps.domain.dbaccessors import get_docs_in_domain_by_class
from corehq.apps.domain.models import Domain
def get_number_of_report_configs_by_data_source(domain, data_source_id):
"""
Return the number of report configurations that use the given data source.
"""
from corehq.apps.userreports.models import ReportConfiguration
return ReportConfiguration.view(
'userreports/report_configs_by_data_source',
reduce=True,
key=[domain, data_source_id]
).one()['value']
def get_all_report_configs():
assert settings.UNIT_TESTING
all_domains = Domain.get_all()
for domain in all_domains:
for report_config in get_report_configs_for_domain(domain.name):
yield report_config
def get_report_configs_for_domain(domain):
from corehq.apps.userreports.models import ReportConfiguration
return sorted(
get_docs_in_domain_by_class(domain, ReportConfiguration),
key=lambda report: report.title,
)
| bsd-3-clause | Python |
e595d823e303a6db0a9c7e24f6a9d1644615009c | Bump version of CaptchaService.py | vuolter/pyload,vuolter/pyload,vuolter/pyload | module/plugins/internal/CaptchaService.py | module/plugins/internal/CaptchaService.py | # -*- coding: utf-8 -*-
from module.plugins.internal.Captcha import Captcha
class CaptchaService(Captcha):
__name__ = "CaptchaService"
__type__ = "captcha"
__version__ = "0.35"
__status__ = "stable"
__description__ = """Base anti-captcha service plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
def init(self):
self.key = None #: Last key detected
#@TODO: Recheck in 0.4.10
def retrieve_key(self, data):
if self.detect_key(data):
return self.key
else:
self.fail(_("%s key not found") % self.__name__)
def retrieve_data(self):
return self.pyfile.plugin.data or self.pyfile.plugin.last_html or ""
def detect_key(self, data=None):
raise NotImplementedError
def challenge(self, key=None, data=None):
raise NotImplementedError
def result(self, server, challenge):
raise NotImplementedError
| # -*- coding: utf-8 -*-
from module.plugins.internal.Captcha import Captcha
class CaptchaService(Captcha):
__name__ = "CaptchaService"
__type__ = "captcha"
__version__ = "0.34"
__status__ = "stable"
__description__ = """Base anti-captcha service plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
def init(self):
self.key = None #: Last key detected
#@TODO: Recheck in 0.4.10
def retrieve_key(self, data):
if self.detect_key(data):
return self.key
else:
self.fail(_("%s key not found") % self.__name__)
def retrieve_data(self):
return self.pyfile.plugin.data or self.pyfile.plugin.last_html or ""
def detect_key(self, data=None):
raise NotImplementedError
def challenge(self, key=None, data=None):
raise NotImplementedError
def result(self, server, challenge):
raise NotImplementedError
| agpl-3.0 | Python |
5efc40cd9be0c212f142d7469a9bf6f44da0827a | add story support in client with -s boolean operator | b3nab/instapy-cli,b3nab/instapy-cli | instapy_cli/__main__.py | instapy_cli/__main__.py | import sys
from platform import python_version
from instapy_cli.cli import InstapyCli as client
from optparse import OptionParser
import pkg_resources # part of setuptools
version = pkg_resources.require('instapy_cli')[0].version
def main(args=None):
print('instapy-cli ' + version + ' | python ' + python_version())
parser = OptionParser(usage="usage: %prog [options]")
parser.add_option('-u', dest='username', help='username')
parser.add_option('-p', dest='password', help='password')
parser.add_option('-f', dest='file', help='file path or url')
parser.add_option('-t', dest='caption', help='caption text')
parser.add_option('-s', dest='story', action='store_true', help='publish to story')
# parser.add_option('-h', dest='help', help='help')
(options, args) = parser.parse_args(args)
if args is None or (
not options.username and
not options.password and
not options.file and
not (options.caption or options.story)
):
print('[USE] instapy -u USR -p PSW -f FILE/LINK -t \'TEXT CAPTION\'')
print('\nFor other reference go to >> https://github.com/b3nab/instapy-cli')
return
if not options.username:
parser.error('Username is required')
password = options.password
if not options.password:
import getpass
password = getpass.getpass()
if not options.file:
parser.error('File path or url link is required to create a media to upload')
story = options.story
if not story:
story = False
with client(options.username, password) as cli:
text = options.caption or ''
cli.upload(options.file, text, story)
if __name__ == '__main__':
main() | import sys
from platform import python_version
from instapy_cli.cli import InstapyCli as client
from optparse import OptionParser
import pkg_resources # part of setuptools
version = pkg_resources.require('instapy_cli')[0].version
'''
TODO:
- use instapy_cli.media to download image link and use it for upload and configure_photo
- rewrite main to support file and links for media
'''
def main(args=None):
welcome_msg = 'instapy-cli'
print('instapy ' + version + ' | python ' + python_version())
# cli = client()
# cli.loop(args)
parser = OptionParser(usage="usage: %prog [options]")
parser.add_option('-u', dest='username', help='username')
parser.add_option('-p', dest='password', help='password')
parser.add_option('-f', dest='file', help='file path or url')
parser.add_option('-t', dest='caption', help='caption text')
# parser.add_option('-h', dest='help', help='help')
(options, args) = parser.parse_args(args)
if args is None or (
not options.username and
not options.password and
not options.file and
not options.caption
):
print('[USE] instapy -u USR -p PSW -f FILE/LINK -t \'TEXT CAPTION\'')
print('\nFor other reference go to >> https://github.com/b3nab/instapy-cli')
return
if not options.username:
parser.error('Username is required')
password = options.password
if not options.password:
import getpass
password = getpass.getpass()
if not options.file:
parser.error('File path or url link is required to create a media to upload')
with client(options.username, password) as cli:
text = options.caption or ''
cli.upload(options.file, text)
if __name__ == '__main__':
main() | mit | Python |
f26c2059ff6e2a595097ef7a03efe149f9e253eb | Add default images for podcasts if necessary | up1/blog-1,up1/blog-1,up1/blog-1 | iterator.py | iterator.py | import os, re, requests
rootdir = '_posts'
for subdir, dirs, files in os.walk(rootdir):
for file in files:
filename = os.path.join(subdir, file)
f = open(filename, "r")
contents = f.readlines()
f.close()
# Find first image
if re.search('podcast', filename):
if re.search('^hero: ', contents[6]):
print filename
contents.insert(6, 'hero: /blog/images/category/podcasts.jpg\n')
f = file.open(filename, "w")
f.write("".join(contents))
f.close()
| import os, re, requests
rootdir = '_posts'
for subdir, dirs, files in os.walk(rootdir):
for file in files:
filename = os.path.join(subdir, file)
f = open(filename, "r")
contents = f.readlines()
f.close()
# Find first image
for key, line in enumerate(contents):
src = re.search('\!\[.*?\]\((.*?)\)', line)
if src:
wordpress_src = re.search('/blog/images/wordpress/(.*)', src.group(1))
if wordpress_src:
image_src = wordpress_src.group(1)
path = 'images/wordpress/'+image_src
print 'Retrieving ' + path + '...'
if not os.path.isfile(path):
print path
f = open(path, "w")
f.write(requests.get("http://blog.stackoverflow.com/wp-content/uploads/" + wordpress_src.group(1)).content)
f.close()
continue
f = open(filename, "w")
contents = "".join(contents)
f.write(contents)
f.close() | mit | Python |
e0f3e68435b406e3bad9b7f7e459b724ea832e9e | Disable summernote editor test from Travis | shoopio/shoop,shoopio/shoop,shoopio/shoop | shuup_tests/browser/admin/test_editor.py | shuup_tests/browser/admin/test_editor.py | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2018, Shuup Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import os
import pytest
from django.core.urlresolvers import reverse
from django.utils.translation import activate
from shuup import configuration
from shuup.testing import factories
from shuup.testing.browser_utils import (
click_element, move_to_element, wait_until_appeared,
wait_until_condition
)
from shuup.testing.utils import initialize_admin_browser_test
pytestmark = pytest.mark.skipif(os.environ.get("SHUUP_BROWSER_TESTS", "0") != "1", reason="No browser tests run.")
@pytest.mark.browser
@pytest.mark.djangodb
@pytest.mark.skipif(os.environ.get("SHUUP_TESTS_TRAVIS", "0") == "1", reason="Disable when run through tox.")
def test_summernote_editor_picture(browser, admin_user, live_server, settings):
activate("en")
factories.get_default_shop()
factories.get_default_product_type()
factories.get_default_sales_unit()
factories.get_default_tax_class()
filer_image = factories.get_random_filer_image()
configuration.set(None, "shuup_product_tour_complete", True)
initialize_admin_browser_test(browser, live_server, settings)
browser.driver.set_window_size(1920, 1080)
url = reverse("shuup_admin:shop_product.new")
browser.visit("%s%s" % (live_server, url))
wait_until_condition(browser, condition=lambda x: x.is_text_present("New shop product"))
img_icon_selector = "#id_base-description__en-editor-wrap i[class='note-icon-picture']"
move_to_element(browser, img_icon_selector)
click_element(browser, img_icon_selector)
wait_until_condition(browser, lambda b: len(b.windows) == 2)
# change to the media browser window
browser.windows.current = browser.windows[1]
# click to select the picture
wait_until_appeared(browser, "a.file-preview")
click_element(browser, "a.file-preview")
# back to the main window
wait_until_condition(browser, lambda b: len(b.windows) == 1)
browser.windows.current = browser.windows[0]
# make sure the image was added to the editor
wait_until_appeared(
browser,
"#id_base-description__en-editor-wrap .note-editable img[src='%s']" % filer_image.url, timeout=20)
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2018, Shuup Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import os
import pytest
from django.core.urlresolvers import reverse
from django.utils.translation import activate
from shuup import configuration
from shuup.testing import factories
from shuup.testing.browser_utils import (
click_element, move_to_element, wait_until_appeared,
wait_until_condition
)
from shuup.testing.utils import initialize_admin_browser_test
pytestmark = pytest.mark.skipif(os.environ.get("SHUUP_BROWSER_TESTS", "0") != "1", reason="No browser tests run.")
@pytest.mark.browser
@pytest.mark.djangodb
def test_summernote_editor_picture(browser, admin_user, live_server, settings):
activate("en")
factories.get_default_shop()
factories.get_default_product_type()
factories.get_default_sales_unit()
factories.get_default_tax_class()
filer_image = factories.get_random_filer_image()
configuration.set(None, "shuup_product_tour_complete", True)
initialize_admin_browser_test(browser, live_server, settings)
browser.driver.set_window_size(1920, 1080)
url = reverse("shuup_admin:shop_product.new")
browser.visit("%s%s" % (live_server, url))
wait_until_condition(browser, condition=lambda x: x.is_text_present("New shop product"))
img_icon_selector = "#id_base-description__en-editor-wrap i[class='note-icon-picture']"
move_to_element(browser, img_icon_selector)
click_element(browser, img_icon_selector)
wait_until_condition(browser, lambda b: len(b.windows) == 2)
# change to the media browser window
browser.windows.current = browser.windows[1]
# click to select the picture
wait_until_appeared(browser, "a.file-preview")
click_element(browser, "a.file-preview")
# back to the main window
wait_until_condition(browser, lambda b: len(b.windows) == 1)
browser.windows.current = browser.windows[0]
# make sure the image was added to the editor
wait_until_appeared(
browser,
"#id_base-description__en-editor-wrap .note-editable img[src='%s']" % filer_image.url, timeout=20)
| agpl-3.0 | Python |
eda2f6905a3275623525c4179358e55e472b4fd7 | Fix bug in urls.py following the sample_list template being renamed. | woodymit/millstone,churchlab/millstone,churchlab/millstone,woodymit/millstone,churchlab/millstone,woodymit/millstone,woodymit/millstone,churchlab/millstone,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source | genome_designer/urls.py | genome_designer/urls.py | from django.conf.urls.defaults import include
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('',
url(r'^$', 'genome_designer.main.views.home_view'),
# Project-specific views
url(r'^projects$',
'genome_designer.main.views.project_list_view'),
url(r'^projects/([\w-]+)$',
'genome_designer.main.views.project_view'),
url(r'^projects/([\w-]+)/refgenomes$',
'genome_designer.main.views.reference_genome_list_view'),
url(r'^projects/([\w-]+)/alignments$',
'genome_designer.main.views.alignment_list_view'),
url(r'^projects/([\w-]+)/sets$',
'genome_designer.main.views.variant_set_list_view'),
url(r'^projects/([\w-]+)/samples$',
'genome_designer.main.views.sample_list_view'),
url(r'^projects/([\w-]+)/variants$',
'genome_designer.main.views.variant_list_view'),
url(r'^projects/([\w-]+)/genes$',
'genome_designer.main.views.gene_list_view'),
url(r'^projects/([\w-]+)/goterms$',
'genome_designer.main.views.goterm_list_view'),
############################################################################
# Templates
############################################################################
url(r'^templates/sample_list_targets_template.tsv$',
'genome_designer.main.views.sample_list_targets_template'),
############################################################################
# Auth
############################################################################
# django-registration defaults (further delgates to django.contrib.auth.url)
(r'^accounts/', include('registration.backends.simple.urls')),
)
| from django.conf.urls.defaults import include
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('',
url(r'^$', 'genome_designer.main.views.home_view'),
# Project-specific views
url(r'^projects$',
'genome_designer.main.views.project_list_view'),
url(r'^projects/([\w-]+)$',
'genome_designer.main.views.project_view'),
url(r'^projects/([\w-]+)/refgenomes$',
'genome_designer.main.views.reference_genome_list_view'),
url(r'^projects/([\w-]+)/alignments$',
'genome_designer.main.views.alignment_list_view'),
url(r'^projects/([\w-]+)/sets$',
'genome_designer.main.views.variant_set_list_view'),
url(r'^projects/([\w-]+)/samples$',
'genome_designer.main.views.sample_list_view'),
url(r'^projects/([\w-]+)/variants$',
'genome_designer.main.views.variant_list_view'),
url(r'^projects/([\w-]+)/genes$',
'genome_designer.main.views.gene_list_view'),
url(r'^projects/([\w-]+)/goterms$',
'genome_designer.main.views.goterm_list_view'),
############################################################################
# Templates
url(r'^templates/sample_list_targets_template.tsv$',
'genome_designer.main.views.sample_list_upload_template'),
############################################################################
############################################################################
# Auth
############################################################################
# django-registration defaults (further delgates to django.contrib.auth.url)
(r'^accounts/', include('registration.backends.simple.urls')),
)
| mit | Python |
50bd1ce1118ddb52a54f679fc9faee4bc3110458 | Allow the --force command line argument to accept one or more stage names' | bjpop/rubra,magosil86/rubra | rubra/cmdline_args.py | rubra/cmdline_args.py | # Process the unix command line of the pipeline.
import argparse
from version import rubra_version
def get_cmdline_args():
return parser.parse_args()
parser = argparse.ArgumentParser(
description='A bioinformatics pipeline system.')
parser.add_argument(
'--pipeline',
metavar='PIPELINE_FILE',
type=str,
help='Your Ruffus pipeline stages (a Python module)')
parser.add_argument(
'--config',
metavar='CONFIG_FILE',
type=str,
nargs='+',
required=True,
help='One or more configuration files (Python modules)')
parser.add_argument(
'--verbose',
type=int,
choices=(0, 1, 2),
required=False,
default=1,
help='Output verbosity level: 0 = quiet; 1 = normal; \
2 = chatty (default is 1)')
parser.add_argument(
'--style',
type=str,
choices=('print', 'run', 'flowchart', 'touchfiles'),
required=False,
default='print',
help='Pipeline behaviour: print; run; touchfiles; flowchart (default is print)')
parser.add_argument(
'--force',
metavar='TASKNAME',
type=str,
required=False,
default=[],
nargs='+',
help='tasks which are forced to be out of date regardless of timestamps')
parser.add_argument(
'--end',
metavar='TASKNAME',
type=str,
required=False,
help='end points (tasks) for the pipeline')
parser.add_argument(
'--rebuild',
type=str,
choices=('fromstart', 'fromend'),
required=False,
default='fromstart',
help='rebuild outputs by working back from end tasks or forwards \
from start tasks (default is fromstart)')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + rubra_version)
| # Process the unix command line of the pipeline.
import argparse
from version import rubra_version
def get_cmdline_args():
return parser.parse_args()
parser = argparse.ArgumentParser(
description='A bioinformatics pipeline system.')
parser.add_argument(
'--pipeline',
metavar='PIPELINE_FILE',
type=str,
help='Your Ruffus pipeline stages (a Python module)')
parser.add_argument(
'--config',
metavar='CONFIG_FILE',
type=str,
nargs='+',
required=True,
help='One or more configuration files (Python modules)')
parser.add_argument(
'--verbose',
type=int,
choices=(0, 1, 2),
required=False,
default=1,
help='Output verbosity level: 0 = quiet; 1 = normal; \
2 = chatty (default is 1)')
parser.add_argument(
'--style',
type=str,
choices=('print', 'run', 'flowchart', 'touchfiles'),
required=False,
default='print',
help='Pipeline behaviour: print; run; touchfiles; flowchart (default is print)')
parser.add_argument(
'--force',
metavar='TASKNAME',
type=str,
required=False,
default=[],
help='tasks which are forced to be out of date regardless of timestamps')
parser.add_argument(
'--end',
metavar='TASKNAME',
type=str,
required=False,
help='end points (tasks) for the pipeline')
parser.add_argument(
'--rebuild',
type=str,
choices=('fromstart', 'fromend'),
required=False,
default='fromstart',
help='rebuild outputs by working back from end tasks or forwards \
from start tasks (default is fromstart)')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + rubra_version)
| mit | Python |
4009e01004ecd9b8f3d759842181b65a3893f73a | fix `TypeError: the JSON object must be str, bytes or bytearray, not NoneType` | drgarcia1986/simple-settings | simple_settings/dynamic_settings/base.py | simple_settings/dynamic_settings/base.py | # -*- coding: utf-8 -*-
import re
from copy import deepcopy
import jsonpickle
class BaseReader(object):
"""
Base class for dynamic readers
"""
_default_conf = {}
def __init__(self, conf):
self.conf = deepcopy(self._default_conf)
self.conf.update(conf)
self.key_pattern = self.conf.get('pattern')
self.auto_casting = self.conf.get('auto_casting')
self.key_prefix = self.conf.get('prefix')
def get(self, key):
if not self._is_valid_key(key):
return
result = self._get(self._qualified_key(key))
if self.auto_casting and (result is not None):
result = jsonpickle.decode(result)
return result
def set(self, key, value):
if not self._is_valid_key(key):
return
if self.auto_casting:
value = jsonpickle.encode(value)
self._set(self._qualified_key(key), value)
def _is_valid_key(self, key):
if not self.key_pattern:
return True
return bool(re.match(self.key_pattern, key))
def _qualified_key(self, key):
"""
Prepends the configured prefix to the key (if applicable).
:param key: The unprefixed key.
:return: The key with any configured prefix prepended.
"""
pfx = self.key_prefix if self.key_prefix is not None else ''
return '{}{}'.format(pfx, key)
| # -*- coding: utf-8 -*-
import re
from copy import deepcopy
import jsonpickle
class BaseReader(object):
"""
Base class for dynamic readers
"""
_default_conf = {}
def __init__(self, conf):
self.conf = deepcopy(self._default_conf)
self.conf.update(conf)
self.key_pattern = self.conf.get('pattern')
self.auto_casting = self.conf.get('auto_casting')
self.key_prefix = self.conf.get('prefix')
def get(self, key):
if not self._is_valid_key(key):
return
result = self._get(self._qualified_key(key))
if self.auto_casting:
result = jsonpickle.decode(result)
return result
def set(self, key, value):
if not self._is_valid_key(key):
return
if self.auto_casting:
value = jsonpickle.encode(value)
self._set(self._qualified_key(key), value)
def _is_valid_key(self, key):
if not self.key_pattern:
return True
return bool(re.match(self.key_pattern, key))
def _qualified_key(self, key):
"""
Prepends the configured prefix to the key (if applicable).
:param key: The unprefixed key.
:return: The key with any configured prefix prepended.
"""
pfx = self.key_prefix if self.key_prefix is not None else ''
return '{}{}'.format(pfx, key)
| mit | Python |
a6cb3bfeb5f7201a0e702024257df1f874a3bb70 | Bump version 15. | hugovk/terroroftinytown,ArchiveTeam/terroroftinytown,hugovk/terroroftinytown,ArchiveTeam/terroroftinytown,ArchiveTeam/terroroftinytown,hugovk/terroroftinytown | terroroftinytown/client/__init__.py | terroroftinytown/client/__init__.py | VERSION = 15 # Please update this whenever .client or .services changes
# Please update MIN_VERSION_OVERRIDE and MIN_CLIENT_VERSION_OVERRIDE as needed
| VERSION = 14 # Please update this whenever .client or .services changes
# Please update MIN_VERSION_OVERRIDE and MIN_CLIENT_VERSION_OVERRIDE as needed
| mit | Python |
3f70ead379b7f586313d01d5ab617fd5368f8ce3 | Print traceback if startup fails | centrumholdings/cthulhubot | cthulhubot/management/commands/restart_masters.py | cthulhubot/management/commands/restart_masters.py | from traceback import print_exc
from django.core.management.base import BaseCommand
from cthulhubot.models import Buildmaster
class Command(BaseCommand):
help = 'Restart all Buildmaster processes'
args = ""
def handle(self, *fixture_labels, **options):
verbosity = int(options.get('verbosity', 1))
commit = int(options.get('commit', 1))
if verbosity > 1:
print 'Restarting buildmasters...'
for b in Buildmaster.objects.all():
if verbosity > 1:
print 'Handling buildmaster %s for project %s' % (str(b.id), str(b.project.name))
try:
b.stop()
except:
print 'Failed to stop master'
print_exc()
try:
b.start()
except:
print 'Failed to start master'
print_exc()
| from django.core.management.base import BaseCommand
from cthulhubot.models import Buildmaster
class Command(BaseCommand):
help = 'Restart all Buildmaster processes'
args = ""
def handle(self, *fixture_labels, **options):
verbosity = int(options.get('verbosity', 1))
commit = int(options.get('commit', 1))
if verbosity > 1:
print 'Restarting buildmasters...'
for b in Buildmaster.objects.all():
if verbosity > 1:
print 'Handling buildmaster %s for project %s' % (str(b.id), str(b.project.name))
try:
b.stop()
except:
print 'Failed to stop master'
try:
b.start()
except:
print 'Failed to start master'
| bsd-3-clause | Python |
4541b5edc808d77f53305eafca418d3be6715e8d | Cut 0.17.3 | pyinvoke/invocations | invocations/_version.py | invocations/_version.py | __version_info__ = (0, 17, 3)
__version__ = '.'.join(map(str, __version_info__))
| __version_info__ = (0, 17, 2)
__version__ = '.'.join(map(str, __version_info__))
| bsd-2-clause | Python |
8d70bad3968cb11c929beafcef44b023822b886f | make interval adjustable in poll_request, and also remove check_response call duplication | rackspace-titan/stacktester,rackspace-titan/stacktester | stacktester/common/http.py | stacktester/common/http.py | from stacktester import exceptions
import httplib2
import os
import time
class Client(object):
USER_AGENT = 'python-nova_test_client'
def __init__(self, host='localhost', port=80, base_url=''):
#TODO: join these more robustly
self.base_url = "http://%s:%s/%s" % (host, port, base_url)
def poll_request(self, method, url, check_response, **kwargs):
timeout = kwargs.pop('timeout', 180)
interval = kwargs.pop('interval', 2)
# Start timestamp
start_ts = int(time.time())
while True:
resp, body = self.request(method, url, **kwargs)
if (check_response(resp, body)):
break
if (int(time.time()) - start_ts >= (timeout * 1000)):
raise exceptions.TimeoutException
time.sleep(interval)
def request(self, method, url, **kwargs):
self.http_obj = httplib2.Http()
params = {}
params['headers'] = {'User-Agent': self.USER_AGENT}
params['headers'].update(kwargs.get('headers', {}))
if 'Content-Type' not in params.get('headers',{}):
params['headers']['Content-Type'] = 'application/json'
if 'body' in kwargs:
params['body'] = kwargs.get('body')
req_url = "%s/%s" % (self.base_url, url)
resp, body = self.http_obj.request(req_url, method, **params)
return resp, body
| from stacktester import exceptions
import httplib2
import os
import time
class Client(object):
USER_AGENT = 'python-nova_test_client'
def __init__(self, host='localhost', port=80, base_url=''):
#TODO: join these more robustly
self.base_url = "http://%s:%s/%s" % (host, port, base_url)
def poll_request(self, method, url, check_response, **kwargs):
timeout = kwargs.pop('timeout', 180)
# Start timestamp
start_ts = int(time.time())
resp, body = self.request(method, url, **kwargs)
while (not check_response(resp, body)):
if (int(time.time()) - start_ts >= (timeout * 1000)):
raise exceptions.TimeoutException
time.sleep(2)
resp, body = self.request(method, url, **kwargs)
def request(self, method, url, **kwargs):
self.http_obj = httplib2.Http()
params = {}
params['headers'] = {'User-Agent': self.USER_AGENT}
params['headers'].update(kwargs.get('headers', {}))
if 'Content-Type' not in kwargs.get('headers',{}):
params['headers']['Content-Type'] = 'application/json'
if 'body' in kwargs:
params['body'] = kwargs.get('body')
req_url = "%s/%s" % (self.base_url, url)
resp, body = self.http_obj.request(req_url, method, **params)
return resp, body
| apache-2.0 | Python |
a8fe56cd60296607f879dea86432532a5b40824a | Add a main method | richli/dame | dame/__init__.py | dame/__init__.py | from .dame import *
def main():
dame.main()
| mit | Python |
|
f5e65b648d632f2e75dffe7943ed3e7105b21d7f | Remove GCS patch fixed upstream in te upstream library | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | core/polyaxon/fs/gcs.py | core/polyaxon/fs/gcs.py | #!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from gcsfs import GCSFileSystem as BaseGCSFileSystem
from polyaxon.connections.gcp.base import get_gc_credentials, get_project_id
class GCSFileSystem(BaseGCSFileSystem):
retries = 3
def get_fs(
context_path: str = None,
asynchronous: bool = False,
use_listings_cache: bool = False,
**kwargs
):
return GCSFileSystem(
project=get_project_id(context_path=context_path, **kwargs),
token=get_gc_credentials(context_path=context_path, **kwargs),
asynchronous=asynchronous,
use_listings_cache=use_listings_cache,
)
| #!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import aiofiles
from gcsfs import GCSFileSystem as BaseGCSFileSystem
from gcsfs.checkers import get_consistency_checker
from gcsfs.retry import retry_request, validate_response
from polyaxon.connections.gcp.base import get_gc_credentials, get_project_id
class GCSFileSystem(BaseGCSFileSystem):
retries = 3
@retry_request(retries=retries)
async def _get_file_request(self, rpath, lpath, *args, headers=None, **kwargs):
consistency = kwargs.pop("consistency", self.consistency)
async with self.session.get(
url=rpath,
params=self._get_params(kwargs),
headers=self._get_headers(headers),
timeout=self.requests_timeout,
) as r:
r.raise_for_status()
checker = get_consistency_checker(consistency)
os.makedirs(os.path.dirname(lpath), exist_ok=True)
async with aiofiles.open(lpath, "wb") as f2:
while True:
data = await r.content.read(4096 * 32)
if not data:
break
await f2.write(data)
checker.update(data)
# validate http request
validate_response(r.status, data, rpath)
checker.validate_http_response(r) # validate file consistency
return r.status, r.headers, r.request_info, data
async def _get_file(self, rpath, lpath, callback=None, **kwargs):
# TODO: Remove when https://github.com/dask/gcsfs/issues/433 is fixed
if await self._isdir(rpath):
return
await super()._get_file(rpath, lpath, callback=callback, **kwargs)
def get_fs(
context_path: str = None,
asynchronous: bool = False,
use_listings_cache: bool = False,
**kwargs
):
return GCSFileSystem(
project=get_project_id(context_path=context_path, **kwargs),
token=get_gc_credentials(context_path=context_path, **kwargs),
asynchronous=asynchronous,
use_listings_cache=use_listings_cache,
)
| apache-2.0 | Python |
cb92a3cf67557fbd4a629601490a74bdb2119935 | add print_list method to dijkstra | NWuensche/DijkstraInPython | dijkstra.py | dijkstra.py | # -*- coding: utf-8 -*-
class Dijkstra:
def __init__(self, adj, start):
self.adj = adj
self.s = start
self.dists = [0 for x in range(len(adj))]
# Liefert minimales Element > 0
def minweight(self, verts):
return min([x for x in verts if x>0])
# Baut liste der Entfernungen von s ausgehend auf
def dist_list(self):
i = s
for v in adj[i]:
if v>0:
self.dists[adj[s].index(v)] = v
# Ausgabe der kürzesten Wege von Knoten s zu alle anderen Knoten
def print_list(self):
print("Distance from Node "+ str(adj[self.s]) + " to:" )
for node in range(len(self.adj)):
print("\t\tNode "+str(adj[node])+ ": " + str(self.dists[node]))
| # -*- coding: utf-8 -*-
class Dijkstra:
def __init__(self, adj, start):
self.adj = adj
self.s = start
self.dists = [0 for x in range(len(adj))]
# Liefert minimales Element > 0
def minweight(self, verts):
return min([x for x in verts if x>0])
# Baut liste der Entfernungen von s ausgehend auf
def dist_list(self):
i = s
for v in adj[i]:
if v>0:
self.dists[adj[s].index(v)] = v
| apache-2.0 | Python |
ed42fa81e1029633f6b6f426c437df0c55262922 | Fix LabHubApp. | jupyter/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab | jupyterlab/labhubapp.py | jupyterlab/labhubapp.py | import os
import warnings
from traitlets import default
from .labapp import LabApp
try:
from jupyterhub.singleuser import SingleUserNotebookApp
except ImportError:
SingleUserLabApp = None
raise ImportError('You must have jupyterhub installed for this to work.')
else:
class SingleUserLabApp(SingleUserNotebookApp, LabApp):
@default("default_url")
def _default_url(self):
"""when using jupyter-labhub, jupyterlab is default ui"""
return "/lab"
def init_webapp(self, *args, **kwargs):
warnings.warn(
"SingleUserLabApp is deprecated, use SingleUserNotebookApp and set " + \
"c.Spawner.default_url = '/lab' in jupyterhub_config.py", DeprecationWarning
)
super().init_webapp(*args, **kwargs)
def main(argv=None):
return SingleUserLabApp.launch_instance(argv)
if __name__ == "__main__":
main()
| import os
from traitlets import default
from .labapp import LabApp
try:
from jupyterhub.singleuser import SingleUserNotebookApp
except ImportError:
SingleUserLabApp = None
raise ImportError('You must have jupyterhub installed for this to work.')
else:
class SingleUserLabApp(SingleUserNotebookApp, LabApp):
@default("default_url")
def _default_url(self):
"""when using jupyter-labhub, jupyterlab is default ui"""
return "/lab"
def init_webapp(self, *args, **kwargs):
warnings.warn(
"SingleUserLabApp is deprecated, use SingleUserNotebookApp and set " + "c.Spawner.default_url = '/lab' in jupyterhub_config.py", DeprecationWarning
)
super().init_webapp(*args, **kwargs)
def main(argv=None):
return SingleUserLabApp.launch_instance(argv)
if __name__ == "__main__":
main()
| bsd-3-clause | Python |
f4c1093616d08bd4abcb5ddc030b59d863dcec05 | Change netapi to use processmanager | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/client/netapi.py | salt/client/netapi.py | # encoding: utf-8
'''
The main entry point for salt-api
'''
# Import python libs
import logging
import multiprocessing
import signal
import os
# Import salt-api libs
import salt.loader
import salt.utils.process
logger = logging.getLogger(__name__)
class NetapiClient(object):
'''
Start each netapi module that is configured to run
'''
def __init__(self, opts):
self.opts = opts
self.process_manager = salt.utils.process.ProcessManager()
self.netapi = salt.loader.netapi(self.opts)
def run(self):
'''
Load and start all available api modules
'''
for fun in self.netapi:
if fun.endswith('.start'):
logger.info('Starting {0} netapi module'.format(fun))
self.process_manager.add_process(self.netapi[fun])
self.process_manager.run()
| # encoding: utf-8
'''
The main entry point for salt-api
'''
# Import python libs
import logging
import multiprocessing
import signal
import os
# Import salt-api libs
import salt.loader
logger = logging.getLogger(__name__)
class NetapiClient(object):
'''
Start each netapi module that is configured to run
'''
def __init__(self, opts):
self.opts = opts
# pid -> {fun: foo, Process: object}
self.pid_map = {}
self.netapi = salt.loader.netapi(self.opts)
def add_process(self, fun):
'''
Start a netapi child process of "fun"
'''
p = multiprocessing.Process(target=self.netapi[fun])
p.start()
logger.info("Started '{0}' api module with pid {1}".format(fun, p.pid))
self.pid_map[p.pid] = {'fun': fun,
'Process': p}
def run(self):
'''
Load and start all available api modules
'''
for fun in self.netapi:
if fun.endswith('.start'):
self.add_process(fun)
# make sure to kill the subprocesses if the parent is killed
signal.signal(signal.SIGTERM, self.kill_children)
while True:
pid, exit_status = os.wait()
if pid not in self.pid_map:
logger.info(('Process of pid {0} died, not a known netapi'
' process, will not restart').format(pid))
continue
logger.info(('Process {0} ({1}) died with exit status {2},'
' restarting...').format(self.pid_map[pid]['fun'],
pid,
exit_status))
self.pid_map[pid]['Process'].join(1)
self.add_process(self.pid_map[pid]['fun'])
del self.pid_map[pid]
def kill_children(self, *args):
'''
Kill all of the children
'''
for pid, p_map in self.pid_map.items():
p_map['Process'].terminate()
p_map['Process'].join()
del self.pid_map[pid]
| apache-2.0 | Python |
89f8d0ebe01e188b5a043dfbf891cf3a3bca0504 | Clarify that event is sent up to the master | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/modules/event.py | salt/modules/event.py | '''
Fire events on the minion, events can be fired up to the master
'''
# Import salt libs
import salt.crypt
import salt.utils.event
import salt.payload
def fire_master(data, tag):
'''
Fire an event off up to the master server
CLI Example::
salt '*' event.fire_master 'stuff to be in the event' 'tag'
'''
load = {'id': __opts__['id'],
'tag': tag,
'data': data,
'cmd': '_minion_event'}
auth = salt.crypt.SAuth(__opts__)
sreq = salt.payload.SREQ(__opts__['master_uri'])
try:
sreq.send('aes', auth.crypticle.dumps(load))
except Exception:
pass
return True
def fire(data, tag):
'''
Fire an event on the local minion event bus
CLI Example::
salt '*' event.fire 'stuff to be in the event' 'tag'
'''
return salt.utils.event.MinionEvent(**__opts__).fire_event(data, tag)
| '''
Fire events on the minion, events can be fired up to the master
'''
# Import salt libs
import salt.crypt
import salt.utils.event
import salt.payload
def fire_master(data, tag):
'''
Fire an event off on the master server
CLI Example::
salt '*' event.fire_master 'stuff to be in the event' 'tag'
'''
load = {'id': __opts__['id'],
'tag': tag,
'data': data,
'cmd': '_minion_event'}
auth = salt.crypt.SAuth(__opts__)
sreq = salt.payload.SREQ(__opts__['master_uri'])
try:
sreq.send('aes', auth.crypticle.dumps(load))
except Exception:
pass
return True
def fire(data, tag):
'''
Fire an event on the local minion event bus
CLI Example::
salt '*' event.fire 'stuff to be in the event' 'tag'
'''
return salt.utils.event.MinionEvent(**__opts__).fire_event(data, tag)
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.