repo_name | ref | path | copies | content
---|---|---|---|---
stringlengths 5–100 | stringlengths 12–67 | stringlengths 4–244 | stringlengths 1–8 | stringlengths 0–1.05M (⌀ = may be null)
gangadhar-kadam/helpdesk-frappe | refs/heads/develop | frappe/core/doctype/file/test_file.py | 13 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from frappe.utils.file_manager import save_file, get_files_path
from frappe import _
from frappe.core.doctype.file.file import move_file
# test_records = frappe.get_test_records('File')
class TestFile(unittest.TestCase):
def setUp(self):
self.delete_test_data()
self.upload_file()
def tearDown(self):
try:
frappe.get_doc("File", {"file_name": "file_copy.txt"}).delete()
except frappe.DoesNotExistError:
pass
def delete_test_data(self):
for f in frappe.db.sql('''select name, file_name from tabFile where
is_home_folder = 0 and is_attachments_folder = 0 order by rgt-lft asc'''):
frappe.delete_doc("File", f[0])
def upload_file(self):
self.saved_file = save_file('file_copy.txt', "Testing file copy example.",
"", "", self.get_folder("Test Folder 1", "Home").name)
self.saved_filename = get_files_path(self.saved_file.file_name)
def get_folder(self, folder_name, parent_folder="Home"):
return frappe.get_doc({
"doctype": "File",
"file_name": _(folder_name),
"is_folder": 1,
"folder": _(parent_folder)
}).insert()
def test_after_upload(self):
self.assertEqual(self.saved_file.folder, _("Home/Test Folder 1"))
folder_size = frappe.db.get_value("File", _("Home/Test Folder 1"), "file_size")
saved_file_size = frappe.db.get_value("File", self.saved_file.name, "file_size")
self.assertEqual(folder_size, saved_file_size)
def test_file_copy(self):
folder = self.get_folder("Test Folder 2", "Home")
file = frappe.get_doc("File", {"file_name":"file_copy.txt"})
move_file([{"name": file.name}], folder.name, file.folder)
file = frappe.get_doc("File", {"file_name":"file_copy.txt"})
self.assertEqual(_("Home/Test Folder 2"), file.folder)
self.assertEqual(frappe.db.get_value("File", _("Home/Test Folder 2"), "file_size"), file.file_size)
self.assertEqual(frappe.db.get_value("File", _("Home/Test Folder 1"), "file_size"), 0)
def test_folder_copy(self):
folder = self.get_folder("Test Folder 2", "Home")
folder = self.get_folder("Test Folder 3", "Home/Test Folder 2")
self.saved_file = save_file('folder_copy.txt', "Testing folder copy example.", "", "", folder.name)
move_file([{"name": folder.name}], 'Home/Test Folder 1', folder.folder)
file = frappe.get_doc("File", {"file_name":"folder_copy.txt"})
file_copy_txt = frappe.get_value("File", {"file_name":"file_copy.txt"})
if file_copy_txt:
frappe.get_doc("File", file_copy_txt).delete()
self.assertEqual(_("Home/Test Folder 1/Test Folder 3"), file.folder)
self.assertEqual(frappe.db.get_value("File", _("Home/Test Folder 1"), "file_size"), file.file_size)
self.assertEqual(frappe.db.get_value("File", _("Home/Test Folder 2"), "file_size"), 0)
def test_non_parent_folder(self):
d = frappe.get_doc({
"doctype": "File",
"file_name": _("Test_Folder"),
"is_folder": 1
})
self.assertRaises(frappe.ValidationError, d.save)
def test_on_delete(self):
file = frappe.get_doc("File", {"file_name":"file_copy.txt"})
file.delete()
self.assertEqual(frappe.db.get_value("File", _("Home/Test Folder 1"), "file_size"), 0)
folder = self.get_folder("Test Folder 3", "Home/Test Folder 1")
self.saved_file = save_file('folder_copy.txt', "Testing folder copy example.", "", "", folder.name)
folder = frappe.get_doc("File", "Home/Test Folder 1/Test Folder 3")
self.assertRaises(frappe.ValidationError, folder.delete)
|
alanwells/donkey | refs/heads/master | donkeycar/parts/actuators/actuators.py | 1 | """
actuators.py
Classes to control the motors and servos. These classes
are wrapped in a mixer class before being used in the drive loop.
"""
import time
from ... import utils
class PCA9685:
'''
PWM motor controller using PCA9685 boards.
This is used for most RC cars.
'''
def __init__(self, channel, frequency=60):
import Adafruit_PCA9685
# Initialise the PCA9685 using the default address (0x40).
self.pwm = Adafruit_PCA9685.PCA9685()
self.pwm.set_pwm_freq(frequency)
self.channel = channel
def set_pulse(self, pulse):
self.pwm.set_pwm(self.channel, 0, pulse)
def run(self, pulse):
self.set_pulse(pulse)
class PWMSteering:
"""
Wrapper over a PWM motor controller to convert angles to PWM pulses.
"""
LEFT_ANGLE = -1
RIGHT_ANGLE = 1
def __init__(self, controller=None,
left_pulse=290,
right_pulse=490):
self.controller = controller
self.left_pulse = left_pulse
self.right_pulse = right_pulse
def run(self, angle):
#map absolute angle to angle that vehicle can implement.
pulse = utils.map_range(angle,
self.LEFT_ANGLE, self.RIGHT_ANGLE,
self.left_pulse, self.right_pulse)
self.controller.set_pulse(pulse)
def shutdown(self):
self.run(0) #set steering straight
class PWMThrottle:
"""
Wrapper over a PWM motor controller to convert -1 to 1 throttle
values to PWM pulses.
"""
MIN_THROTTLE = -1
MAX_THROTTLE = 1
def __init__(self, controller=None,
max_pulse=300,
min_pulse=490,
zero_pulse=350):
self.controller = controller
self.max_pulse = max_pulse
self.min_pulse = min_pulse
self.zero_pulse = zero_pulse
#send zero pulse to calibrate ESC
self.controller.set_pulse(self.zero_pulse)
time.sleep(1)
def run(self, throttle):
if throttle > 0:
pulse = utils.map_range(throttle,
0, self.MAX_THROTTLE,
self.zero_pulse, self.max_pulse)
else:
pulse = utils.map_range(throttle,
self.MIN_THROTTLE, 0,
self.min_pulse, self.zero_pulse)
self.controller.set_pulse(pulse)
def shutdown(self):
self.run(0) #stop vehicle
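# Illustrative wiring of the classes above (a sketch, not part of the original
# file; the channel numbers and pulse calibration values are assumptions that
# must be tuned per vehicle):
#
# steering = PWMSteering(controller=PCA9685(channel=1),
#                        left_pulse=290, right_pulse=490)
# throttle = PWMThrottle(controller=PCA9685(channel=0),
#                        max_pulse=300, min_pulse=490, zero_pulse=350)
# steering.run(0.5)   # turn halfway toward full right
# throttle.run(0.25)  # quarter throttle forward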
class Adafruit_DCMotor_Hat:
'''
Adafruit DC Motor Controller
Used for each motor on a differential drive car.
'''
def __init__(self, motor_num):
from Adafruit_MotorHAT import Adafruit_MotorHAT, Adafruit_DCMotor
import atexit
self.FORWARD = Adafruit_MotorHAT.FORWARD
self.BACKWARD = Adafruit_MotorHAT.BACKWARD
self.RELEASE = Adafruit_MotorHAT.RELEASE # saved here because the import above is local to __init__
self.mh = Adafruit_MotorHAT(addr=0x60)
self.motor = self.mh.getMotor(motor_num)
self.motor_num = motor_num
atexit.register(self.turn_off_motors)
self.speed = 0
self.throttle = 0
def run(self, speed):
'''
Update the speed of the motor where 1 is full forward and
-1 is full backwards.
'''
if speed > 1 or speed < -1:
raise ValueError( "Speed must be between 1(forward) and -1(reverse)")
self.speed = speed
self.throttle = int(utils.map_range(abs(speed), -1, 1, -255, 255))
if speed > 0:
self.motor.run(self.FORWARD)
else:
self.motor.run(self.BACKWARD)
self.motor.setSpeed(self.throttle)
def shutdown(self):
self.mh.getMotor(self.motor_num).run(self.RELEASE) # was a NameError: Adafruit_MotorHAT is only imported inside __init__
class Maestro:
'''
Pololu Maestro Servo controller
Use the MaestroControlCenter to set the speed & acceleration values to 0!
'''
import threading
maestro_device = None
astar_device = None
maestro_lock = threading.Lock()
astar_lock = threading.Lock()
def __init__(self, channel, frequency = 60):
import serial
if Maestro.maestro_device is None:
Maestro.maestro_device = serial.Serial('/dev/ttyACM0', 115200)
self.channel = channel
self.frequency = frequency
self.lturn = False
self.rturn = False
self.headlights = False
self.brakelights = False
if Maestro.astar_device is None:
Maestro.astar_device = serial.Serial('/dev/ttyACM2', 115200, timeout= 0.01)
def set_pulse(self, pulse):
# Recalculate pulse width from the Adafruit values
w = pulse * (1 / (self.frequency * 4096)) # in seconds
w *= 1000 * 1000 # in microseconds
w *= 4 # in quarter-microseconds, as the Maestro expects
w = int(w)
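# Worked example (values assumed for illustration): with frequency=60 and
# pulse=307, w = 307 / (60 * 4096) s ~= 1249 us ~= 4996 quarter-microseconds.
# The write below follows the Pololu Maestro compact protocol: 0x84 is the
# 'Set Target' command, followed by the channel number and the 14-bit target
# split into two 7-bit bytes (low 7 bits first).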
with Maestro.maestro_lock:
Maestro.maestro_device.write(bytearray([ 0x84,
self.channel,
(w & 0x7F),
((w >> 7) & 0x7F)]))
def set_turn_left(self, v):
if self.lturn != v:
self.lturn = v
b = bytearray('L' if v else 'l', 'ascii')
with Maestro.astar_lock:
Maestro.astar_device.write(b)
def set_turn_right(self, v):
if self.rturn != v:
self.rturn = v
b = bytearray('R' if v else 'r', 'ascii')
with Maestro.astar_lock:
Maestro.astar_device.write(b)
def set_headlight(self, v):
if self.headlights != v:
self.headlights = v
b = bytearray('H' if v else 'h', 'ascii')
with Maestro.astar_lock:
Maestro.astar_device.write(b)
def set_brake(self, v):
if self.brakelights != v:
self.brakelights = v
b = bytearray('B' if v else 'b', 'ascii')
with Maestro.astar_lock:
Maestro.astar_device.write(b)
def readline(self):
ret = None
with Maestro.astar_lock:
# expecting lines like
# E n nnn n
if Maestro.astar_device.inWaiting() > 8:
ret = Maestro.astar_device.readline()
if ret is not None:
ret = ret.rstrip()
return ret
class Teensy:
'''
Teensy Servo controller
'''
import threading
teensy_device = None
astar_device = None
teensy_lock = threading.Lock()
astar_lock = threading.Lock()
def __init__(self, channel, frequency = 60):
import serial
if Teensy.teensy_device is None:
Teensy.teensy_device = serial.Serial('/dev/teensy', 115200, timeout = 0.01)
self.channel = channel
self.frequency = frequency
self.lturn = False
self.rturn = False
self.headlights = False
self.brakelights = False
if Teensy.astar_device is None:
Teensy.astar_device = serial.Serial('/dev/astar', 115200, timeout = 0.01)
def set_pulse(self, pulse):
# Recalculate pulse width from the Adafruit values
w = pulse * (1 / (self.frequency * 4096)) # in seconds
w *= 1000 * 1000 # in microseconds
with Teensy.teensy_lock:
Teensy.teensy_device.write(("%c %.1f\n" % (self.channel, w)).encode('ascii'))
def set_turn_left(self, v):
if self.lturn != v:
self.lturn = v
b = bytearray('L' if v else 'l', 'ascii')
with Teensy.astar_lock:
Teensy.astar_device.write(b)
def set_turn_right(self, v):
if self.rturn != v:
self.rturn = v
b = bytearray('R' if v else 'r', 'ascii')
with Teensy.astar_lock:
Teensy.astar_device.write(b)
def set_headlight(self, v):
if self.headlights != v:
self.headlights = v
b = bytearray('H' if v else 'h', 'ascii')
with Teensy.astar_lock:
Teensy.astar_device.write(b)
def set_brake(self, v):
if self.brakelights != v:
self.brakelights = v
b = bytearray('B' if v else 'b', 'ascii')
with Teensy.astar_lock:
Teensy.astar_device.write(b)
def teensy_readline(self):
ret = None
with Teensy.teensy_lock:
# expecting lines like
# E n nnn n
if Teensy.teensy_device.inWaiting() > 8:
ret = Teensy.teensy_device.readline()
if ret is not None:
ret = ret.rstrip()
return ret
def astar_readline(self):
ret = None
with Teensy.astar_lock:
# expecting lines like
# E n nnn n
if Teensy.astar_device.inWaiting() > 8:
ret = Teensy.astar_device.readline()
if ret is not None:
ret = ret.rstrip()
return ret
class GPIOPinOutput:
'''
Toggle a GPIO pin based on a given condition:
high (on) if the condition is true, low (off) if it is false.
Good for LEDs.
'''
def __init__(self, pin):
import RPi.GPIO as GPIO
self.gpio = GPIO
self.pin = pin
self.gpio.setmode(self.gpio.BCM)
self.gpio.setup(self.pin, self.gpio.OUT)
self.gpio.output(self.pin, self.gpio.LOW)
def toggle(self, condition):
if condition:
self.gpio.output(self.pin, self.gpio.HIGH)
else:
self.gpio.output(self.pin, self.gpio.LOW)
def run(self, condition):
self.toggle(condition)
def shutdown(self):
self.gpio.cleanup()
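# Minimal usage sketch (not part of the original file; the pin number is an
# assumption, and BCM numbering applies as configured above):
#
# led = GPIOPinOutput(pin=18)
# led.run(True)    # drive the pin high
# led.run(False)   # drive the pin low
# led.shutdown()   # release the GPIO resources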
class MockController(object):
def __init__(self):
pass
def run(self, pulse):
pass
def shutdown(self):
pass
|
stu9arkin/PSSSSTTTT | refs/heads/master | client.py | 1 | from socket import *
from tkinter import *
import threading
def sendMessage(): # two send-message functions are needed: the Return-key binding passes an (event)
message = e.get() # argument, which is not compatible with the Send button's command= callback
s.send(message.encode())
input_user.set('') # clear the entry box, matching the Return-key handler below
def sendMessageEnter(event):
message = e.get()
s.send(message.encode())
input_user.set('')
def receiveMessage(): #try/except doesn't fix crash here
while True:
reply = s.recv(1024)
reply = reply.decode()
print("Recieved ", repr(reply)) #just to test outside the interface
messages.insert(INSERT, '%s\n' % reply)
try:
s = socket(AF_INET, SOCK_STREAM)
s.connect(("82.27.169.93", 1337))
except OSError: # connect() failures raise OSError; a bare except would also swallow KeyboardInterrupt
print('Connection failed.')
#Set-up of interface
root = Tk()
root.title("Psst Chat Client")
root.geometry("500x400")
tk_rgb = '#212121'
tk_rgb2 = '#424242'
tk_rgb3 = '#E0E0E0'
root.configure(background=tk_rgb)
#frame for send button
f = Frame(root, height=32, width=100)
f.pack_propagate(0) # don't shrink
f.place(x=395,y=362)
#send button
b = Button(f, text="Send",command=sendMessage, bg=tk_rgb2, fg='white', font=("Helvetica", 9))
b.pack(fill=BOTH, expand=1)
#frame for entry text box
f1 = Frame(root, height=32, width=400)
f1.pack_propagate(0) # don't shrink
f1.place(x=5,y=362)
#entry text box
input_user = StringVar()
e = Entry(f1, bg=tk_rgb3, text=input_user)
e.pack(fill=BOTH, expand=1)
e.delete(0, END)
e.bind("<Return>", sendMessageEnter)
#frame for text window?
t = Frame(root, height=300, width=395)
t.place(x=10,y=10)
#actual text box
messages = Text(t, bg=tk_rgb3)
messages.place(x=0,y=0)
#Threading
newThread = threading.Thread(target=receiveMessage)
newThread.daemon = True # let the interpreter exit when the window closes instead of hanging on recv()
newThread.start()
root.mainloop()
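# A minimal echo server for testing this client locally (a hypothetical
# helper, not part of the original script; run it first and point the
# connect() call above at 127.0.0.1):
#
# from socket import socket, AF_INET, SOCK_STREAM
# server = socket(AF_INET, SOCK_STREAM)
# server.bind(('', 1337))
# server.listen(1)
# conn, addr = server.accept()
# while True:
#     data = conn.recv(1024)
#     if not data:
#         break
#     conn.send(data)  # echo the message back to the client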
|
netzkolchose/django-cms | refs/heads/develop | cms/urls.py | 7 | # -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import include, url
from cms.apphook_pool import apphook_pool
from cms.appresolver import get_app_patterns
from cms.constants import SLUG_REGEXP
from cms.views import details
if settings.APPEND_SLASH:
regexp = r'^(?P<slug>%s)/$' % SLUG_REGEXP
else:
regexp = r'^(?P<slug>%s)$' % SLUG_REGEXP
if apphook_pool.get_apphooks():
# If there are some application urls, use special resolver,
# so we will have standard reverse support.
urlpatterns = get_app_patterns()
else:
urlpatterns = []
urlpatterns.extend([
url(r'^cms_wizard/', include('cms.wizards.urls')),
url(regexp, details, name='pages-details-by-slug'),
url(r'^$', details, {'slug': ''}, name='pages-root'),
])
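# Illustrative resolution under these patterns (an inference, not original
# code): with APPEND_SLASH=True, a request for '/about/' matches
# 'pages-details-by-slug' and calls details(request, slug='about'),
# while '/' matches 'pages-root' with slug=''.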
|
rueycheng/wp-download | refs/heads/master | setup.py | 1 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import os
from distutils.core import setup
from glob import glob
setup(name='wp-download',
version='0.1.2b',
description='Wikipedia database dump downloader',
author='Wolodja Wentland',
author_email='wentland@cl.uni-heidelberg.de',
url='http://github.com/babilen/wp-download',
license='GPLv3',
scripts=['scripts/wp-download'],
long_description=open('doc/description.rst').read(),
packages=['wp_download'],
package_dir={'': 'lib'},
data_files=[
('share/doc/wp-download/examples/', ['examples/wpdownloadrc.sample']),
('share/doc/wp-download/doc', ['doc/Makefile','doc/README']),
('share/doc/wp-download/doc/rst', ['doc/rst/index.rst',
'doc/rst/conf.py']),
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Science/Research',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Topic :: Database',
'Topic :: Scientific/Engineering',
]
)
|
tgcmteam/tgcmlinux | refs/heads/master | test/many_top_windows.py | 1 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Authors : David Castellanos <dcastellanos@indra.es>
#
# Copyright (c) 2012, Telefonica Móviles España S.A.U.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this library; if not, write to the Free
# Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import gtk
def create_window(text):
window = gtk.Window()
vbox = gtk.VBox()
vbox.set_border_width(10)
vbox.set_spacing(6)
window.add(vbox)
label = gtk.Label(text)
vbox.add(label)
button = gtk.Button('foo')
vbox.add(button)
window.show_all()
button.connect('clicked', button_callback, window)
return window
def button_callback(widget, parent=None):
dialog = gtk.MessageDialog(parent=parent, flags=gtk.DIALOG_MODAL, buttons=gtk.BUTTONS_OK)
dialog.set_markup('Lorem ipsum alea jacta est')
dialog.run()
def main():
window_group = gtk.WindowGroup()
main_window = create_window('main_window')
window_group.add_window(main_window)
for i in range(0, 3):
child_window = create_window('child window #%d' % i)
child_window.set_transient_for(main_window)
window_group.add_window(child_window)
gtk.main()
if __name__ == '__main__':
main()
|
jfinkels/networkx | refs/heads/master | networkx/algorithms/approximation/clique.py | 11 | # -*- coding: utf-8 -*-
"""
Cliques.
"""
# Copyright (C) 2011-2012 by
# Nicholas Mancuso <nick.mancuso@gmail.com>
# All rights reserved.
# BSD license.
import networkx as nx
from networkx.algorithms.approximation import ramsey
__author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)"""
__all__ = ["clique_removal","max_clique"]
def max_clique(G):
r"""Find the Maximum Clique
Finds an `O(|V|/(\log |V|)^2)` approximation of the maximum
clique or independent set in the worst case.
Parameters
----------
G : NetworkX graph
Undirected graph
Returns
-------
clique : set
The apx-maximum clique of the graph
Notes
------
A clique in an undirected graph G = (V, E) is a subset of the vertex set
`C \subseteq V`, such that for every two vertices in C, there exists an edge
connecting the two. This is equivalent to saying that the subgraph
induced by C is complete (in some cases, the term clique may also refer
to the subgraph).
A maximum clique is a clique of the largest possible size in a given graph.
The clique number `\omega(G)` of a graph G is the number of
vertices in a maximum clique in G. The intersection number of
G is the smallest number of cliques that together cover all edges of G.
http://en.wikipedia.org/wiki/Maximum_clique
References
----------
.. [1] Boppana, R., & Halldórsson, M. M. (1992).
Approximating maximum independent sets by excluding subgraphs.
BIT Numerical Mathematics, 32(2), 180–196. Springer.
doi:10.1007/BF01994876
"""
if G is None:
raise ValueError("Expected NetworkX graph!")
# finding the maximum clique in a graph is equivalent to finding
# the independent set in the complementary graph
cgraph = nx.complement(G)
iset, _ = clique_removal(cgraph)
return iset
def clique_removal(G):
""" Repeatedly remove cliques from the graph.
Results in a `O(|V|/(\log |V|)^2)` approximation of maximum clique
& independent set. Returns the largest independent set found, along
with found maximal cliques.
Parameters
----------
G : NetworkX graph
Undirected graph
Returns
-------
max_ind_cliques : (set, list) tuple
Maximal independent set and list of maximal cliques (sets) in the graph.
References
----------
.. [1] Boppana, R., & Halldórsson, M. M. (1992).
Approximating maximum independent sets by excluding subgraphs.
BIT Numerical Mathematics, 32(2), 180–196. Springer.
"""
graph = G.copy(with_data=False)
c_i, i_i = ramsey.ramsey_R2(graph)
cliques = [c_i]
isets = [i_i]
while graph:
graph.remove_nodes_from(c_i)
c_i, i_i = ramsey.ramsey_R2(graph)
if c_i:
cliques.append(c_i)
if i_i:
isets.append(i_i)
# Determine the largest independent set as measured by cardinality.
maxiset = max(isets, key=len)
return maxiset, cliques
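# Usage sketch (not part of the original module; the sizes shown hold for
# this small example, though in general the results are only approximations):
#
# import networkx as nx
# from networkx.algorithms.approximation import clique
# G = nx.complete_graph(5)
# print(len(clique.max_clique(G)))  # -> 5 for K5
# iset, cliques = clique.clique_removal(G)
# print(len(iset))  # -> 1: K5 has no independent set larger than one node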
|
apocquet/django | refs/heads/master | tests/model_validation/tests.py | 292 | from django.core import management
from django.core.checks import Error, run_checks
from django.db.models.signals import post_init
from django.test import SimpleTestCase
from django.test.utils import override_settings
from django.utils import six
class OnPostInit(object):
def __call__(self, **kwargs):
pass
def on_post_init(**kwargs):
pass
@override_settings(
INSTALLED_APPS=['django.contrib.auth', 'django.contrib.contenttypes'],
SILENCED_SYSTEM_CHECKS=['fields.W342'], # ForeignKey(unique=True)
)
class ModelValidationTest(SimpleTestCase):
def test_models_validate(self):
# All our models should validate properly
# Validation Tests:
# * choices= Iterable of Iterables
# See: https://code.djangoproject.com/ticket/20430
# * related_name='+' doesn't clash with another '+'
# See: https://code.djangoproject.com/ticket/21375
management.call_command("check", stdout=six.StringIO())
def test_model_signal(self):
unresolved_references = post_init.unresolved_references.copy()
post_init.connect(on_post_init, sender='missing-app.Model')
post_init.connect(OnPostInit(), sender='missing-app.Model')
errors = run_checks()
expected = [
Error(
"The 'on_post_init' function was connected to the 'post_init' "
"signal with a lazy reference to the 'missing-app.Model' "
"sender, which has not been installed.",
hint=None,
obj='model_validation.tests',
id='signals.E001',
),
Error(
"An instance of the 'OnPostInit' class was connected to "
"the 'post_init' signal with a lazy reference to the "
"'missing-app.Model' sender, which has not been installed.",
hint=None,
obj='model_validation.tests',
id='signals.E001',
)
]
self.assertEqual(errors, expected)
post_init.unresolved_references = unresolved_references
|
w1z2g3/crossbar | refs/heads/master | crossbar/worker/test/examples/goodclass.py | 4 | #####################################################################################
#
# Copyright (C) Tavendo GmbH
#
# Unless a separate license agreement exists between you and Tavendo GmbH (e.g. you
# have purchased a commercial license), the license terms below apply.
#
# Should you enter into a separate license agreement after having received a copy of
# this software, then the terms of such license agreement replace the terms below at
# the time at which such license agreement becomes effective.
#
# In case a separate license agreement ends, and such agreement ends without being
# replaced by another separate license agreement, the license terms below apply
# from the time at which said agreement ends.
#
# LICENSE TERMS
#
# This program is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License, version 3, as published by the
# Free Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU Affero General Public License Version 3 for more details.
#
# You should have received a copy of the GNU Affero General Public license along
# with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################
from __future__ import absolute_import, division
from autobahn.twisted.wamp import ApplicationSession
from autobahn.wamp.types import PublishOptions
from twisted.internet.defer import inlineCallbacks
_ = []
class AppSession(ApplicationSession):
@inlineCallbacks
def onJoin(self, details):
yield self.subscribe(_.append, "com.test")
yield self.publish("com.test", "woo",
options=PublishOptions(exclude_me=False))
|
alanthai/django-guardian | refs/heads/master | guardian/migrations/0002_auto__add_field_groupobjectpermission_object_pk__add_field_userobjectp.py | 85 | # encoding: utf-8
from south.db import db
from south.v2 import SchemaMigration
from guardian.compat import user_model_label
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'GroupObjectPermission.object_pk'
db.add_column('guardian_groupobjectpermission', 'object_pk', self.gf('django.db.models.fields.TextField')(default=''), keep_default=False)
# Adding field 'UserObjectPermission.object_pk'
db.add_column('guardian_userobjectpermission', 'object_pk', self.gf('django.db.models.fields.TextField')(default=''), keep_default=False)
def backwards(self, orm):
# Deleting field 'GroupObjectPermission.object_pk'
db.delete_column('guardian_groupobjectpermission', 'object_pk')
# Deleting field 'UserObjectPermission.object_pk'
db.delete_column('guardian_userobjectpermission', 'object_pk')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
user_model_label: {
'Meta': {'object_name': user_model_label.split('.')[-1]},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'guardian.groupobjectpermission': {
'Meta': {'unique_together': "(['group', 'permission', 'content_type', 'object_id'],)", 'object_name': 'GroupObjectPermission'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'object_pk': ('django.db.models.fields.TextField', [], {'default': "''"}),
'permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Permission']"})
},
'guardian.userobjectpermission': {
'Meta': {'unique_together': "(['user', 'permission', 'content_type', 'object_id'],)", 'object_name': 'UserObjectPermission'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'object_pk': ('django.db.models.fields.TextField', [], {'default': "''"}),
'permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Permission']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_model_label})
}
}
complete_apps = ['guardian']
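# Usage note (not part of the original migration): with South installed, this
# schema migration would typically be applied with `./manage.py migrate guardian`.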
|
Az107/advancedapt-get | refs/heads/master | apt-getGUI/data/aa-g.py | 1 | #!/usr/bin/python
import easygui as gui
import os
import sys
#start here
title = " advanced apt-get"
if os.geteuid() != 0:
print 'Error: root access is required.'
sys.exit(1)
task = gui.buttonbox(title=title,
msg="choose a task", choices=("install", "remove", "upgrade"))
if task == "upgrade":
command = "sudo apt-get upgrade"
elif task == "None":
exit()
elif (task == "install") or (task == "remove"):
pack = gui.enterbox(title=title,
msg="enter a package name")
command = "sudo apt-get " + task + " " + pack + " -y"
else:
exit()
#execute
os.system(command)
fin = gui.msgbox(title=title ,
msg="task completed", ok_button="finish")
if fin == "finish":
exit()
|
sunlianqiang/kbengine | refs/heads/master | kbe/src/lib/python/Lib/_weakrefset.py | 169 | # Access WeakSet through the weakref module.
# This code is separated-out because it is needed
# by abc.py to load everything else at startup.
from _weakref import ref
__all__ = ['WeakSet']
class _IterationGuard:
# This context manager registers itself in the current iterators of the
# weak container, such as to delay all removals until the context manager
# exits.
# This technique should be relatively thread-safe (since sets are).
def __init__(self, weakcontainer):
# Don't create cycles
self.weakcontainer = ref(weakcontainer)
def __enter__(self):
w = self.weakcontainer()
if w is not None:
w._iterating.add(self)
return self
def __exit__(self, e, t, b):
w = self.weakcontainer()
if w is not None:
s = w._iterating
s.remove(self)
if not s:
w._commit_removals()
class WeakSet:
def __init__(self, data=None):
self.data = set()
def _remove(item, selfref=ref(self)):
self = selfref()
if self is not None:
if self._iterating:
self._pending_removals.append(item)
else:
self.data.discard(item)
self._remove = _remove
# A list of keys to be removed
self._pending_removals = []
self._iterating = set()
if data is not None:
self.update(data)
def _commit_removals(self):
l = self._pending_removals
discard = self.data.discard
while l:
discard(l.pop())
def __iter__(self):
with _IterationGuard(self):
for itemref in self.data:
item = itemref()
if item is not None:
# Caveat: the iterator will keep a strong reference to
# `item` until it is resumed or closed.
yield item
def __len__(self):
return len(self.data) - len(self._pending_removals)
def __contains__(self, item):
try:
wr = ref(item)
except TypeError:
return False
return wr in self.data
def __reduce__(self):
return (self.__class__, (list(self),),
getattr(self, '__dict__', None))
def add(self, item):
if self._pending_removals:
self._commit_removals()
self.data.add(ref(item, self._remove))
def clear(self):
if self._pending_removals:
self._commit_removals()
self.data.clear()
def copy(self):
return self.__class__(self)
def pop(self):
if self._pending_removals:
self._commit_removals()
while True:
try:
itemref = self.data.pop()
except KeyError:
raise KeyError('pop from empty WeakSet')
item = itemref()
if item is not None:
return item
def remove(self, item):
if self._pending_removals:
self._commit_removals()
self.data.remove(ref(item))
def discard(self, item):
if self._pending_removals:
self._commit_removals()
self.data.discard(ref(item))
def update(self, other):
if self._pending_removals:
self._commit_removals()
for element in other:
self.add(element)
def __ior__(self, other):
self.update(other)
return self
def difference(self, other):
newset = self.copy()
newset.difference_update(other)
return newset
__sub__ = difference
def difference_update(self, other):
self.__isub__(other)
def __isub__(self, other):
if self._pending_removals:
self._commit_removals()
if self is other:
self.data.clear()
else:
self.data.difference_update(ref(item) for item in other)
return self
def intersection(self, other):
return self.__class__(item for item in other if item in self)
__and__ = intersection
def intersection_update(self, other):
self.__iand__(other)
def __iand__(self, other):
if self._pending_removals:
self._commit_removals()
self.data.intersection_update(ref(item) for item in other)
return self
def issubset(self, other):
return self.data.issubset(ref(item) for item in other)
__le__ = issubset
def __lt__(self, other):
return self.data < set(ref(item) for item in other)
def issuperset(self, other):
return self.data.issuperset(ref(item) for item in other)
__ge__ = issuperset
def __gt__(self, other):
return self.data > set(ref(item) for item in other)
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return self.data == set(ref(item) for item in other)
def symmetric_difference(self, other):
newset = self.copy()
newset.symmetric_difference_update(other)
return newset
__xor__ = symmetric_difference
def symmetric_difference_update(self, other):
self.__ixor__(other)
def __ixor__(self, other):
if self._pending_removals:
self._commit_removals()
if self is other:
self.data.clear()
else:
self.data.symmetric_difference_update(ref(item, self._remove) for item in other)
return self
def union(self, other):
return self.__class__(e for s in (self, other) for e in s)
__or__ = union
def isdisjoint(self, other):
return len(self.intersection(other)) == 0
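# Quick demonstration sketch of WeakSet semantics (not part of the original
# module; a class instance is used because plain ints and strings are not
# weak-referenceable):
#
# class Token: pass
# t = Token()
# s = WeakSet([t])
# print(len(s))  # 1 while a strong reference exists
# del t          # drop the last strong reference...
# import gc; gc.collect()
# print(len(s))  # ...and the entry disappears: 0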
|
dotcloud/zerorpc-python | refs/heads/master | zerorpc/__init__.py | 20 | # -*- coding: utf-8 -*-
# Open Source Initiative OSI - The MIT License (MIT):Licensing
#
# The MIT License (MIT)
# Copyright (c) 2015 François-Xavier Bourlet (bombela+zerorpc@gmail.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Tell flake8 to ignore this file (otherwise it will complain about import *)
# flake8: noqa
from .version import *
from .exceptions import *
from .context import *
from .socket import *
from .channel import *
from .events import *
from .core import *
from .heartbeat import *
from .decorators import *
|
vincepandolfo/django | refs/heads/master | tests/files/tests.py | 20 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import gzip
import os
import struct
import tempfile
import unittest
from io import BytesIO, StringIO
from django.core.files import File
from django.core.files.base import ContentFile
from django.core.files.move import file_move_safe
from django.core.files.temp import NamedTemporaryFile
from django.core.files.uploadedfile import SimpleUploadedFile, UploadedFile
from django.test import mock
from django.utils import six
from django.utils._os import upath
try:
from PIL import Image
except ImportError:
Image = None
else:
from django.core.files import images
class FileTests(unittest.TestCase):
def test_unicode_uploadedfile_name(self):
uf = UploadedFile(name='¿Cómo?', content_type='text')
self.assertIs(type(repr(uf)), str)
def test_unicode_file_name(self):
f = File(None, 'djángö')
self.assertIs(type(repr(f)), str)
def test_context_manager(self):
orig_file = tempfile.TemporaryFile()
base_file = File(orig_file)
with base_file as f:
self.assertIs(base_file, f)
self.assertFalse(f.closed)
self.assertTrue(f.closed)
self.assertTrue(orig_file.closed)
def test_namedtemporaryfile_closes(self):
"""
The symbol django.core.files.NamedTemporaryFile is assigned as
a different class on different operating systems. In
any case, the result should minimally mock some of the API of
tempfile.NamedTemporaryFile from the Python standard library.
"""
tempfile = NamedTemporaryFile()
self.assertTrue(hasattr(tempfile, "closed"))
self.assertFalse(tempfile.closed)
tempfile.close()
self.assertTrue(tempfile.closed)
def test_file_mode(self):
# Should not set mode to None if it is not present.
# See #14681, stdlib gzip module crashes if mode is set to None
file = SimpleUploadedFile("mode_test.txt", b"content")
self.assertFalse(hasattr(file, 'mode'))
gzip.GzipFile(fileobj=file)
def test_file_iteration(self):
"""
File objects should yield lines when iterated over.
Refs #22107.
"""
file = File(BytesIO(b'one\ntwo\nthree'))
self.assertEqual(list(file), [b'one\n', b'two\n', b'three'])
def test_file_iteration_windows_newlines(self):
"""
#8149 - File objects with \r\n line endings should yield lines
when iterated over.
"""
f = File(BytesIO(b'one\r\ntwo\r\nthree'))
self.assertEqual(list(f), [b'one\r\n', b'two\r\n', b'three'])
def test_file_iteration_mac_newlines(self):
"""
#8149 - File objects with \r line endings should yield lines
when iterated over.
"""
f = File(BytesIO(b'one\rtwo\rthree'))
self.assertEqual(list(f), [b'one\r', b'two\r', b'three'])
def test_file_iteration_mixed_newlines(self):
f = File(BytesIO(b'one\rtwo\nthree\r\nfour'))
self.assertEqual(list(f), [b'one\r', b'two\n', b'three\r\n', b'four'])
def test_file_iteration_with_unix_newline_at_chunk_boundary(self):
f = File(BytesIO(b'one\ntwo\nthree'))
# Set chunk size to create a boundary after \n:
# b'one\n...
# ^
f.DEFAULT_CHUNK_SIZE = 4
self.assertEqual(list(f), [b'one\n', b'two\n', b'three'])
def test_file_iteration_with_windows_newline_at_chunk_boundary(self):
f = File(BytesIO(b'one\r\ntwo\r\nthree'))
# Set chunk size to create a boundary between \r and \n:
# b'one\r\n...
# ^
f.DEFAULT_CHUNK_SIZE = 4
self.assertEqual(list(f), [b'one\r\n', b'two\r\n', b'three'])
def test_file_iteration_with_mac_newline_at_chunk_boundary(self):
f = File(BytesIO(b'one\rtwo\rthree'))
# Set chunk size to create a boundary after \r:
# b'one\r...
# ^
f.DEFAULT_CHUNK_SIZE = 4
self.assertEqual(list(f), [b'one\r', b'two\r', b'three'])
def test_file_iteration_with_text(self):
f = File(StringIO('one\ntwo\nthree'))
self.assertEqual(list(f), ['one\n', 'two\n', 'three'])
def test_seekable(self):
"""
File.seekable() should be available on Python 3.
"""
with tempfile.TemporaryFile() as temp:
temp.write(b"contents\n")
test_file = File(temp, name="something.txt")
if six.PY2:
self.assertFalse(hasattr(test_file, 'seekable'))
if six.PY3:
self.assertTrue(hasattr(test_file, 'seekable'))
self.assertTrue(test_file.seekable())
class NoNameFileTestCase(unittest.TestCase):
"""
Other examples of unnamed files may be tempfile.SpooledTemporaryFile or
urllib.urlopen()
"""
def test_noname_file_default_name(self):
self.assertEqual(File(BytesIO(b'A file with no name')).name, None)
def test_noname_file_get_size(self):
self.assertEqual(File(BytesIO(b'A file with no name')).size, 19)
class ContentFileTestCase(unittest.TestCase):
def test_content_file_default_name(self):
self.assertEqual(ContentFile(b"content").name, None)
def test_content_file_custom_name(self):
"""
Test that the constructor of ContentFile accepts 'name' (#16590).
"""
name = "I can have a name too!"
self.assertEqual(ContentFile(b"content", name=name).name, name)
def test_content_file_input_type(self):
"""
Test that ContentFile can accept both bytes and unicode and that the
retrieved content is of the same type.
"""
self.assertIsInstance(ContentFile(b"content").read(), bytes)
if six.PY3:
self.assertIsInstance(ContentFile("español").read(), six.text_type)
else:
self.assertIsInstance(ContentFile("español").read(), bytes)
class DimensionClosingBug(unittest.TestCase):
"""
Test that get_image_dimensions() properly closes files (#8817)
"""
@unittest.skipUnless(Image, "Pillow not installed")
def test_not_closing_of_files(self):
"""
Open files passed into get_image_dimensions() should stay opened.
"""
empty_io = BytesIO()
try:
images.get_image_dimensions(empty_io)
finally:
self.assertTrue(not empty_io.closed)
@unittest.skipUnless(Image, "Pillow not installed")
def test_closing_of_filenames(self):
"""
get_image_dimensions() called with a filename should close the file.
"""
# We need to inject a modified open() builtin into the images module
# that checks whether the file was closed properly when the function is
# called with a filename instead of a file object.
# get_image_dimensions will call our catching_open instead of the
# regular builtin one.
class FileWrapper(object):
_closed = []
def __init__(self, f):
self.f = f
def __getattr__(self, name):
return getattr(self.f, name)
def close(self):
self._closed.append(True)
self.f.close()
def catching_open(*args):
return FileWrapper(open(*args))
images.open = catching_open
try:
images.get_image_dimensions(os.path.join(os.path.dirname(upath(__file__)), "test1.png"))
finally:
del images.open
self.assertTrue(FileWrapper._closed)
class InconsistentGetImageDimensionsBug(unittest.TestCase):
"""
Test that get_image_dimensions() works properly after various calls
using a file handler (#11158)
"""
@unittest.skipUnless(Image, "Pillow not installed")
def test_multiple_calls(self):
"""
Multiple calls of get_image_dimensions() should return the same size.
"""
img_path = os.path.join(os.path.dirname(upath(__file__)), "test.png")
with open(img_path, 'rb') as fh:
image = images.ImageFile(fh)
image_pil = Image.open(fh)
size_1 = images.get_image_dimensions(image)
size_2 = images.get_image_dimensions(image)
self.assertEqual(image_pil.size, size_1)
self.assertEqual(size_1, size_2)
@unittest.skipUnless(Image, "Pillow not installed")
def test_bug_19457(self):
"""
Regression test for #19457
get_image_dimensions fails on some PNGs on which Image.size works fine
"""
img_path = os.path.join(os.path.dirname(upath(__file__)), "magic.png")
size = images.get_image_dimensions(img_path)
with open(img_path, 'rb') as fh:
self.assertEqual(size, Image.open(fh).size)
@unittest.skipUnless(Image, "Pillow not installed")
class GetImageDimensionsTests(unittest.TestCase):
def test_invalid_image(self):
"""
get_image_dimensions() should return (None, None) for the dimensions of
invalid images (#24441).
brokenimg.png is not a valid image and it has been generated by:
$ echo "123" > brokenimg.png
"""
img_path = os.path.join(os.path.dirname(upath(__file__)), "brokenimg.png")
with open(img_path, 'rb') as fh:
size = images.get_image_dimensions(fh)
self.assertEqual(size, (None, None))
def test_valid_image(self):
"""
get_image_dimensions() should catch struct.error while feeding the PIL
Image parser (#24544).
Emulates the Parser feed error. Since the error is raised on every feed
attempt, the resulting image size should be invalid: (None, None).
"""
img_path = os.path.join(os.path.dirname(upath(__file__)), "test.png")
with mock.patch('PIL.ImageFile.Parser.feed', side_effect=struct.error):
with open(img_path, 'rb') as fh:
size = images.get_image_dimensions(fh)
self.assertEqual(size, (None, None))
class FileMoveSafeTests(unittest.TestCase):
def test_file_move_overwrite(self):
handle_a, self.file_a = tempfile.mkstemp()
handle_b, self.file_b = tempfile.mkstemp()
# file_move_safe should raise an IOError exception if destination file exists and allow_overwrite is False
with self.assertRaises(IOError):
file_move_safe(self.file_a, self.file_b, allow_overwrite=False)
# should allow it and continue on if allow_overwrite is True
self.assertIsNone(file_move_safe(self.file_a, self.file_b, allow_overwrite=True))
os.close(handle_a)
os.close(handle_b)
class SpooledTempTests(unittest.TestCase):
def test_in_memory_spooled_temp(self):
with tempfile.SpooledTemporaryFile() as temp:
temp.write(b"foo bar baz quux\n")
django_file = File(temp, name="something.txt")
self.assertEqual(django_file.size, 17)
def test_written_spooled_temp(self):
with tempfile.SpooledTemporaryFile(max_size=4) as temp:
temp.write(b"foo bar baz quux\n")
django_file = File(temp, name="something.txt")
self.assertEqual(django_file.size, 17)
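# The iteration behavior exercised above, in one line (a sketch, not part of
# the original test module):
#
# from io import BytesIO
# from django.core.files import File
# assert list(File(BytesIO(b'a\r\nb'))) == [b'a\r\n', b'b']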
|
Sorsly/subtle | refs/heads/master | google-cloud-sdk/lib/googlecloudsdk/third_party/apis/cloudresourcemanager/v2alpha1/__init__.py | 415 | """Package marker file."""
import pkgutil
__path__ = pkgutil.extend_path(__path__, __name__)
|
ossdemura/django-miniblog | refs/heads/dev | Lib/site-packages/django/core/cache/__init__.py | 51 | """
Caching framework.
This package defines set of cache backends that all conform to a simple API.
In a nutshell, a cache is a set of values -- which can be any object that
may be pickled -- identified by string keys. For the complete API, see
the abstract BaseCache class in django.core.cache.backends.base.
Client code should use the `cache` variable defined here to access the default
cache backend and look up non-default cache backends in the `caches` dict-like
object.
See docs/topics/cache.txt for information on the public API.
"""
from threading import local
from django.conf import settings
from django.core import signals
from django.core.cache.backends.base import (
BaseCache, CacheKeyWarning, InvalidCacheBackendError,
)
from django.utils.module_loading import import_string
__all__ = [
'cache', 'DEFAULT_CACHE_ALIAS', 'InvalidCacheBackendError',
'CacheKeyWarning', 'BaseCache',
]
DEFAULT_CACHE_ALIAS = 'default'
def _create_cache(backend, **kwargs):
try:
# Try to get the CACHES entry for the given backend name first
try:
conf = settings.CACHES[backend]
except KeyError:
try:
# Trying to import the given backend, in case it's a dotted path
import_string(backend)
except ImportError as e:
raise InvalidCacheBackendError("Could not find backend '%s': %s" % (
backend, e))
location = kwargs.pop('LOCATION', '')
params = kwargs
else:
params = conf.copy()
params.update(kwargs)
backend = params.pop('BACKEND')
location = params.pop('LOCATION', '')
backend_cls = import_string(backend)
except ImportError as e:
raise InvalidCacheBackendError(
"Could not find backend '%s': %s" % (backend, e))
return backend_cls(location, params)
class CacheHandler(object):
"""
A Cache Handler to manage access to Cache instances.
Ensures only one instance of each alias exists per thread.
"""
def __init__(self):
self._caches = local()
def __getitem__(self, alias):
try:
return self._caches.caches[alias]
except AttributeError:
self._caches.caches = {}
except KeyError:
pass
if alias not in settings.CACHES:
raise InvalidCacheBackendError(
"Could not find config for '%s' in settings.CACHES" % alias
)
cache = _create_cache(alias)
self._caches.caches[alias] = cache
return cache
def all(self):
return getattr(self._caches, 'caches', {}).values()
caches = CacheHandler()
class DefaultCacheProxy(object):
"""
Proxy access to the default Cache object's attributes.
This allows the legacy `cache` object to be thread-safe using the new
``caches`` API.
"""
def __getattr__(self, name):
return getattr(caches[DEFAULT_CACHE_ALIAS], name)
def __setattr__(self, name, value):
return setattr(caches[DEFAULT_CACHE_ALIAS], name, value)
def __delattr__(self, name):
return delattr(caches[DEFAULT_CACHE_ALIAS], name)
def __contains__(self, key):
return key in caches[DEFAULT_CACHE_ALIAS]
def __eq__(self, other):
return caches[DEFAULT_CACHE_ALIAS] == other
def __ne__(self, other):
return caches[DEFAULT_CACHE_ALIAS] != other
cache = DefaultCacheProxy()
def close_caches(**kwargs):
# Some caches -- python-memcached in particular -- need to do a cleanup at the
# end of a request cycle. If not implemented in a particular backend,
# cache.close() is a no-op.
for cache in caches.all():
cache.close()
signals.request_finished.connect(close_caches)
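# Usage sketch (not part of the original module; assumes a CACHES setting
# with a 'default' alias configured):
#
# from django.core.cache import cache, caches
# cache.set('greeting', 'hello', timeout=30)  # default alias via the proxy
# assert caches['default'].get('greeting') == 'hello'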
|
fredericlepied/ansible | refs/heads/devel | test/integration/targets/module_utils/library/test_env_override.py | 170 | #!/usr/bin/python
# Most of these names are only available via PluginLoader so pylint doesn't
# know they exist
# pylint: disable=no-name-in-module
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.json_utils import data
from ansible.module_utils.mork import data as mork_data
results = {"json_utils": data, "mork": mork_data}
AnsibleModule(argument_spec=dict()).exit_json(**results)
|
WarrenWeckesser/scipy | refs/heads/master | scipy/fft/_backend.py | 12 | import scipy._lib.uarray as ua
from . import _pocketfft
class _ScipyBackend:
"""The default backend for fft calculations
Notes
-----
We use the domain ``numpy.scipy`` rather than ``scipy`` because in the
future, ``uarray`` will treat the domain as a hierarchy. This means the user
can install a single backend for ``numpy`` and have it implement
``numpy.scipy.fft`` as well.
"""
__ua_domain__ = "numpy.scipy.fft"
@staticmethod
def __ua_function__(method, args, kwargs):
fn = getattr(_pocketfft, method.__name__, None)
if fn is None:
return NotImplemented
return fn(*args, **kwargs)
_named_backends = {
'scipy': _ScipyBackend,
}
def _backend_from_arg(backend):
"""Maps strings to known backends and validates the backend"""
if isinstance(backend, str):
try:
backend = _named_backends[backend]
except KeyError as e:
raise ValueError('Unknown backend {}'.format(backend)) from e
if backend.__ua_domain__ != 'numpy.scipy.fft':
raise ValueError('Backend does not implement "numpy.scipy.fft"')
return backend
def set_global_backend(backend):
"""Sets the global fft backend
The global backend has higher priority than registered backends, but lower
priority than context-specific backends set with `set_backend`.
Parameters
----------
backend : {object, 'scipy'}
The backend to use.
Can either be a ``str`` containing the name of a known backend
{'scipy'} or an object that implements the uarray protocol.
Raises
------
ValueError: If the backend does not implement ``numpy.scipy.fft``.
Notes
-----
This will overwrite the previously set global backend, which, by default, is
the SciPy implementation.
Examples
--------
We can set the global fft backend:
>>> from scipy.fft import fft, set_global_backend
>>> set_global_backend("scipy") # Sets global backend. "scipy" is the default backend.
>>> fft([1]) # Calls the global backend
array([1.+0.j])
"""
backend = _backend_from_arg(backend)
ua.set_global_backend(backend)
def register_backend(backend):
"""
Register a backend for permanent use.
Registered backends have the lowest priority and will be tried after the
global backend.
Parameters
----------
backend : {object, 'scipy'}
The backend to use.
Can either be a ``str`` containing the name of a known backend
{'scipy'} or an object that implements the uarray protocol.
Raises
------
ValueError: If the backend does not implement ``numpy.scipy.fft``.
Examples
--------
We can register a new fft backend:
>>> from scipy.fft import fft, register_backend, set_global_backend
>>> class NoopBackend: # Define an invalid Backend
... __ua_domain__ = "numpy.scipy.fft"
... def __ua_function__(self, func, args, kwargs):
... return NotImplemented
>>> set_global_backend(NoopBackend()) # Set the invalid backend as global
>>> register_backend("scipy") # Register a new backend
>>> fft([1]) # The registered backend is called because the global backend returns `NotImplemented`
array([1.+0.j])
>>> set_global_backend("scipy") # Restore global backend to default
"""
backend = _backend_from_arg(backend)
ua.register_backend(backend)
def set_backend(backend, coerce=False, only=False):
"""Context manager to set the backend within a fixed scope.
Upon entering the ``with`` statement, the given backend will be added to
the list of available backends with the highest priority. Upon exit, the
backend is reset to the state before entering the scope.
Parameters
----------
backend : {object, 'scipy'}
The backend to use.
Can either be a ``str`` containing the name of a known backend
{'scipy'} or an object that implements the uarray protocol.
coerce : bool, optional
Whether to allow expensive conversions for the ``x`` parameter. e.g.,
copying a NumPy array to the GPU for a CuPy backend. Implies ``only``.
only : bool, optional
If only is ``True`` and this backend returns ``NotImplemented``, then a
BackendNotImplemented error will be raised immediately. Ignoring any
lower priority backends.
Examples
--------
>>> import scipy.fft as fft
>>> with fft.set_backend('scipy', only=True):
... fft.fft([1]) # Always calls the scipy implementation
array([1.+0.j])
"""
backend = _backend_from_arg(backend)
return ua.set_backend(backend, coerce=coerce, only=only)
def skip_backend(backend):
"""Context manager to skip a backend within a fixed scope.
Within the context of a ``with`` statement, the given backend will not be
called. This covers backends registered both locally and globally. Upon
exit, the backend will again be considered.
Parameters
----------
backend : {object, 'scipy'}
The backend to skip.
Can either be a ``str`` containing the name of a known backend
{'scipy'} or an object that implements the uarray protocol.
Examples
--------
>>> import scipy.fft as fft
>>> fft.fft([1]) # Calls default SciPy backend
array([1.+0.j])
>>> with fft.skip_backend('scipy'): # We explicitly skip the SciPy backend
... fft.fft([1]) # leaving no implementation available
Traceback (most recent call last):
...
BackendNotImplementedError: No selected backends had an implementation ...
"""
backend = _backend_from_arg(backend)
return ua.skip_backend(backend)
set_global_backend('scipy')
|
eneldoserrata/marcos_openerp | refs/heads/master | addons/base_report/__init__.py | 7 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2011 Cubic ERP - Teradata SAC. (http://cubicerp.com).
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import ir_actions
import ir_model
import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
owlabs/incubator-airflow | refs/heads/master | tests/contrib/hooks/test_azure_fileshare_hook.py | 1 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import json
import unittest
from airflow.contrib.hooks.azure_fileshare_hook import AzureFileShareHook
from airflow.models import Connection
from airflow.utils import db
from tests.compat import mock
class TestAzureFileshareHook(unittest.TestCase):
def setUp(self):
db.merge_conn(
Connection(
conn_id='wasb_test_key', conn_type='wasb',
login='login', password='key'
)
)
db.merge_conn(
Connection(
conn_id='wasb_test_sas_token', conn_type='wasb',
login='login', extra=json.dumps({'sas_token': 'token'})
)
)
def test_key(self):
from azure.storage.file import FileService
hook = AzureFileShareHook(wasb_conn_id='wasb_test_key')
self.assertEqual(hook.conn_id, 'wasb_test_key')
self.assertIsInstance(hook.connection, FileService)
def test_sas_token(self):
from azure.storage.file import FileService
hook = AzureFileShareHook(wasb_conn_id='wasb_test_sas_token')
self.assertEqual(hook.conn_id, 'wasb_test_sas_token')
self.assertIsInstance(hook.connection, FileService)
@mock.patch('airflow.contrib.hooks.azure_fileshare_hook.FileService',
autospec=True)
def test_check_for_file(self, mock_service):
mock_instance = mock_service.return_value
mock_instance.exists.return_value = True
hook = AzureFileShareHook(wasb_conn_id='wasb_test_sas_token')
self.assertTrue(hook.check_for_file('share', 'directory', 'file', timeout=3))
mock_instance.exists.assert_called_once_with(
'share', 'directory', 'file', timeout=3
)
@mock.patch('airflow.contrib.hooks.azure_fileshare_hook.FileService',
autospec=True)
def test_check_for_directory(self, mock_service):
mock_instance = mock_service.return_value
mock_instance.exists.return_value = True
hook = AzureFileShareHook(wasb_conn_id='wasb_test_sas_token')
self.assertTrue(hook.check_for_directory('share', 'directory', timeout=3))
mock_instance.exists.assert_called_once_with(
'share', 'directory', timeout=3
)
@mock.patch('airflow.contrib.hooks.azure_fileshare_hook.FileService',
autospec=True)
def test_load_file(self, mock_service):
mock_instance = mock_service.return_value
hook = AzureFileShareHook(wasb_conn_id='wasb_test_sas_token')
hook.load_file('path', 'share', 'directory', 'file', max_connections=1)
mock_instance.create_file_from_path.assert_called_once_with(
'share', 'directory', 'file', 'path', max_connections=1
)
@mock.patch('airflow.contrib.hooks.azure_fileshare_hook.FileService',
autospec=True)
def test_load_string(self, mock_service):
mock_instance = mock_service.return_value
hook = AzureFileShareHook(wasb_conn_id='wasb_test_sas_token')
hook.load_string('big string', 'share', 'directory', 'file', timeout=1)
mock_instance.create_file_from_text.assert_called_once_with(
'share', 'directory', 'file', 'big string', timeout=1
)
@mock.patch('airflow.contrib.hooks.azure_fileshare_hook.FileService',
autospec=True)
def test_load_stream(self, mock_service):
mock_instance = mock_service.return_value
hook = AzureFileShareHook(wasb_conn_id='wasb_test_sas_token')
hook.load_stream('stream', 'share', 'directory', 'file', 42, timeout=1)
mock_instance.create_file_from_stream.assert_called_once_with(
'share', 'directory', 'file', 'stream', 42, timeout=1
)
@mock.patch('airflow.contrib.hooks.azure_fileshare_hook.FileService',
autospec=True)
def test_list_directories_and_files(self, mock_service):
mock_instance = mock_service.return_value
hook = AzureFileShareHook(wasb_conn_id='wasb_test_sas_token')
hook.list_directories_and_files('share', 'directory', timeout=1)
mock_instance.list_directories_and_files.assert_called_once_with(
'share', 'directory', timeout=1
)
@mock.patch('airflow.contrib.hooks.azure_fileshare_hook.FileService',
autospec=True)
def test_create_directory(self, mock_service):
mock_instance = mock_service.return_value
hook = AzureFileShareHook(wasb_conn_id='wasb_test_sas_token')
hook.create_directory('share', 'directory', timeout=1)
mock_instance.create_directory.assert_called_once_with(
'share', 'directory', timeout=1
)
@mock.patch('airflow.contrib.hooks.azure_fileshare_hook.FileService',
autospec=True)
def test_get_file(self, mock_service):
mock_instance = mock_service.return_value
hook = AzureFileShareHook(wasb_conn_id='wasb_test_sas_token')
hook.get_file('path', 'share', 'directory', 'file', max_connections=1)
mock_instance.get_file_to_path.assert_called_once_with(
'share', 'directory', 'file', 'path', max_connections=1
)
@mock.patch('airflow.contrib.hooks.azure_fileshare_hook.FileService',
autospec=True)
def test_get_file_to_stream(self, mock_service):
mock_instance = mock_service.return_value
hook = AzureFileShareHook(wasb_conn_id='wasb_test_sas_token')
hook.get_file_to_stream('stream', 'share', 'directory', 'file', max_connections=1)
mock_instance.get_file_to_stream.assert_called_once_with(
'share', 'directory', 'file', 'stream', max_connections=1
)
if __name__ == '__main__':
unittest.main()
|
School-of-Innovation-Experiment/InnovationManagement | refs/heads/master | facultyStaff/views.py | 6027 | # Create your views here.
|
zzw922cn/Automatic_Speech_Recognition | refs/heads/master | speechvalley/feature/madarian/preprocess.py | 1 | # encoding: utf-8
# ******************************************************
# Author : zzw922cn
# Last modified: 2017-12-09 11:00
# Email : zzw922cn@gmail.com
# Filename : madarian_preprocess.py
# Description : Feature preprocessing for some Madarian dataset
# ******************************************************
import os
import codecs
from speechvalley.feature.madarian import convertDigit2Character
from speechvalley.feature.madarian import convertCharacter2Digit
class DigitProcessor(object):
def __init__(self, mode):
assert mode=='digit2char' or mode=='char2digit', "Wrong mode: %s" % str(mode)
self.mode = mode
def processString(self, string):
if self.mode == 'digit2char':
return convertDigit2Character(string)
else:
return convertCharacter2Digit(string)
def processFile(self, fileName):
result = []
assert os.path.isfile(fileName), "Wrong file path: %s" % str(fileName)
with codecs.open(fileName,'r','utf-8') as f:
content=f.readlines()
if self.mode == 'digit2char':
for string in content:
result.append(convertDigit2Character(string))
else:
for string in content:
result.append(convertCharacter2Digit(string))
return result
if __name__ == '__main__':
DP = DigitProcessor(mode='digit2char')
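    # Illustrative usage (the sample input is an assumption, not taken from
    # the original file): convert a digit string and print the result.
    print(DP.processString(u'123'))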
|
cynapse/cynin | refs/heads/master | src/ubify.coretypes/ubify/coretypes/content/contentspace.py | 5 | ###############################################################################
#cyn.in is an open source Collaborative Knowledge Management Appliance that
#enables teams to seamlessly work together on files, documents and content in
#a secure central environment.
#
#cyn.in v2 an open source appliance is distributed under the GPL v3 license
#along with commercial support options.
#
#cyn.in is a Cynapse Invention.
#
#Copyright (C) 2008 Cynapse India Pvt. Ltd.
#
#This program is free software: you can redistribute it and/or modify it under
#the terms of the GNU General Public License as published by the Free Software
#Foundation, either version 3 of the License, or any later version and observe
#the Additional Terms applicable to this program and must display appropriate
#legal notices. In accordance with Section 7(b) of the GNU General Public
#License version 3, these Appropriate Legal Notices must retain the display of
#the "Powered by cyn.in" AND "A Cynapse Invention" logos. You should have
#received a copy of the detailed Additional Terms License with this program.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
#Public License for more details.
#
#You should have received a copy of the GNU General Public License along with
#this program. If not, see <http://www.gnu.org/licenses/>.
#
#You can contact Cynapse at support@cynapse.com with any problems with cyn.in.
#For any queries regarding the licensing, please send your mails to
# legal@cynapse.com
#
#You can also contact Cynapse at:
#802, Building No. 1,
#Dheeraj Sagar, Malad(W)
#Mumbai-400064, India
###############################################################################
from Products.Archetypes.atapi import *
from Products.ATContentTypes.content.folder import ATBTreeFolder as BaseClass
from Products.ATContentTypes.content.folder import ATBTreeFolderSchema as DefaultSchema
from Products.ATContentTypes.content.base import registerATCT
from ubify.coretypes.config import PROJECTNAME,applications
from zope.interface import implements
from ubify.coretypes.interfaces import IImportExportCoretype,ISyndication,IApplicationPerspectives,IContentSpace, ITypesRestriction
from Products.CMFCore.utils import getToolByName
schema = DefaultSchema.copy()
class ContentSpace(BaseClass):
implements(IContentSpace,IImportExportCoretype,ISyndication,IApplicationPerspectives,ITypesRestriction)
__doc__ = BaseClass.__doc__ + "(customizable version)"
portal_type = "ContentSpace"
archetype_name = BaseClass.archetype_name
if schema['title'] <> None and schema['title'].widget <> None:
if schema['title'].widget.maxlength:
schema['title'].widget.maxlength = '60'
schema = schema
# Override initializeArchetype to turn on syndication by default
def initializeArchetype(self, **kwargs):
ret_val = BaseClass.initializeArchetype(self, **kwargs)
# Enable topic syndication by default
self.enableSyndication()
return ret_val
def enableSyndication(self):
syn_tool = getToolByName(self, 'portal_syndication', None)
if syn_tool is not None:
if (syn_tool.isSiteSyndicationAllowed() and
not syn_tool.isSyndicationAllowed(self)):
syn_tool.enableSyndication(self)
def getEntries(self,num_of_entries):
"""Getter for syndacation support
"""
syn_tool = getToolByName(self, 'portal_syndication')
if num_of_entries is None:
num_of_entries = int(syn_tool.getMaxItems(self))
return self.queryfolderbytype()[0][:num_of_entries]
def synContentValues(self):
"""Getter for syndacation support
"""
syn_tool = getToolByName(self, 'portal_syndication')
num_of_entries = int(syn_tool.getMaxItems(self))
brains = self.getEntries(num_of_entries)
objs = [brain.getObject() for brain in brains]
return [obj for obj in objs if obj is not None]
def listApplications(self):
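        # Build the list of application views for this space. When the
        # 'availableappviews' property is set and non-empty, only the listed
        # views are marked visible, and each separator stays visible only if
        # at least one view in its group is; otherwise every view is shown.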
apps = []
apps.extend(applications)
gen_items = [k['id'] for k in apps[1:3]]
type_items = [k['id'] for k in apps[4:]]
apps.remove(apps[11])
showall = True
if self.hasProperty('availableappviews'):
available_appviews = self.getProperty('availableappviews')
if len(available_appviews) > 0:
showall = False
for eachapp in apps:
if eachapp['id'] in available_appviews:
eachapp['visible'] = True
else:
eachapp['visible'] = False
if eachapp['id'] == 'genericseparator':
eachapp['visible'] = len([k for k in available_appviews if k in gen_items]) > 0
elif eachapp['id'] == 'appviewseparator':
eachapp['visible'] = len([k for k in available_appviews if k in type_items]) > 0
if showall:
for eachapp in apps:
eachapp['visible'] = True
returnapps = [k for k in apps if k['visible'] == True]
return returnapps
def disallowedtypes(self):
ptypes_tool = getToolByName(self,'portal_types')
fti = getattr(ptypes_tool,self.portal_type)
allowedtypes = []
if fti <> None:
allowedtypes = fti.allowed_content_types
dis_types = [obj for obj in allowedtypes if obj not in self.getImmediatelyAddableTypes()]
return dis_types
registerATCT(ContentSpace, PROJECTNAME) |
marctc/django | refs/heads/master | tests/admin_inlines/test_templates.py | 285 | from __future__ import unicode_literals
from django.template.loader import render_to_string
from django.test import SimpleTestCase
class TestTemplates(SimpleTestCase):
def test_javascript_escaping(self):
context = {
'inline_admin_formset': {
'formset': {'prefix': 'my-prefix'},
'opts': {'verbose_name': 'verbose name\\'},
},
}
output = render_to_string('admin/edit_inline/stacked.html', context)
self.assertIn('prefix: "my\\u002Dprefix",', output)
self.assertIn('addText: "Add another Verbose name\\u005C"', output)
output = render_to_string('admin/edit_inline/tabular.html', context)
self.assertIn('prefix: "my\\u002Dprefix",', output)
self.assertIn('addText: "Add another Verbose name\\u005C"', output)
|
clebergnu/autotest | refs/heads/master | client/tests/kvm/tests/stepmaker.py | 2 | #!/usr/bin/python
"""
Step file creator/editor.
@copyright: Red Hat Inc 2009
@author: mgoldish@redhat.com (Michael Goldish)
@version: "20090401"
"""
import pygtk, gtk, gobject, time, os, commands, logging
import common
from autotest_lib.client.common_lib import error
from autotest_lib.client.virt import virt_utils, ppm_utils, virt_step_editor
from autotest_lib.client.virt import kvm_monitor
pygtk.require('2.0')
class StepMaker(virt_step_editor.StepMakerWindow):
"""
    Application used to create a step file. It grabs your input to the
    virtual machine and records it in a 'step file' that can be played
    back, making it possible to do unattended installs.
"""
# Constructor
def __init__(self, vm, steps_filename, tempdir, params):
virt_step_editor.StepMakerWindow.__init__(self)
self.vm = vm
self.steps_filename = steps_filename
self.steps_data_dir = ppm_utils.get_data_dir(steps_filename)
self.tempdir = tempdir
self.screendump_filename = os.path.join(tempdir, "scrdump.ppm")
self.params = params
if not os.path.exists(self.steps_data_dir):
os.makedirs(self.steps_data_dir)
self.steps_file = open(self.steps_filename, "w")
self.vars_file = open(os.path.join(self.steps_data_dir, "vars"), "w")
self.step_num = 1
self.run_time = 0
self.update_delay = 1000
self.prev_x = 0
self.prev_y = 0
self.vars = {}
self.timer_id = None
self.time_when_done_clicked = time.time()
self.time_when_actions_completed = time.time()
self.steps_file.write("# Generated by Step Maker\n")
self.steps_file.write("# Generated on %s\n" % time.asctime())
self.steps_file.write("# uname -a: %s\n" %
commands.getoutput("uname -a"))
self.steps_file.flush()
self.vars_file.write("# This file lists the vars used during recording"
" with Step Maker\n")
self.vars_file.flush()
# Done/Break HBox
hbox = gtk.HBox(spacing=10)
self.user_vbox.pack_start(hbox)
hbox.show()
self.button_break = gtk.Button("Break")
self.button_break.connect("clicked", self.event_break_clicked)
hbox.pack_start(self.button_break)
self.button_break.show()
self.button_done = gtk.Button("Done")
self.button_done.connect("clicked", self.event_done_clicked)
hbox.pack_start(self.button_done)
self.button_done.show()
# Set window title
self.window.set_title("Step Maker")
# Connect "capture" button
self.button_capture.connect("clicked", self.event_capture_clicked)
# Switch to run mode
self.switch_to_run_mode()
def destroy(self, widget):
self.vm.monitor.cmd("cont")
self.steps_file.close()
self.vars_file.close()
virt_step_editor.StepMakerWindow.destroy(self, widget)
# Utilities
def redirect_timer(self, delay=0, func=None):
if self.timer_id != None:
gobject.source_remove(self.timer_id)
self.timer_id = None
if func != None:
self.timer_id = gobject.timeout_add(delay, func,
priority=gobject.PRIORITY_LOW)
def switch_to_run_mode(self):
# Set all widgets to their default states
self.clear_state(clear_screendump=False)
# Enable/disable some widgets
self.button_break.set_sensitive(True)
self.button_done.set_sensitive(False)
self.data_vbox.set_sensitive(False)
# Give focus to the Break button
self.button_break.grab_focus()
# Start the screendump timer
self.redirect_timer(100, self.update)
# Resume the VM
self.vm.monitor.cmd("cont")
def switch_to_step_mode(self):
# Set all widgets to their default states
self.clear_state(clear_screendump=False)
# Enable/disable some widgets
self.button_break.set_sensitive(False)
self.button_done.set_sensitive(True)
self.data_vbox.set_sensitive(True)
# Give focus to the keystrokes entry widget
self.entry_keys.grab_focus()
# Start the screendump timer
self.redirect_timer()
# Stop the VM
self.vm.monitor.cmd("stop")
# Events in step mode
def update(self):
self.redirect_timer()
if os.path.exists(self.screendump_filename):
os.unlink(self.screendump_filename)
try:
self.vm.monitor.screendump(self.screendump_filename, debug=False)
except kvm_monitor.MonitorError, e:
logging.warn(e)
else:
self.set_image_from_file(self.screendump_filename)
self.redirect_timer(self.update_delay, self.update)
return True
def event_break_clicked(self, widget):
if not self.vm.is_alive():
self.message("The VM doesn't seem to be alive.", "Error")
return
# Switch to step mode
self.switch_to_step_mode()
# Compute time elapsed since last click on "Done" and add it
# to self.run_time
self.run_time += time.time() - self.time_when_done_clicked
# Set recording time widget
self.entry_time.set_text("%.2f" % self.run_time)
# Update screendump ID
self.update_screendump_id(self.steps_data_dir)
# By default, check the barrier checkbox
self.check_barrier.set_active(True)
# Set default sleep and barrier timeout durations
time_delta = time.time() - self.time_when_actions_completed
        if time_delta < 1.0:
            time_delta = 1.0
self.spin_sleep.set_value(round(time_delta))
self.spin_barrier_timeout.set_value(round(time_delta * 5))
# Set window title
self.window.set_title("Step Maker -- step %d at time %.2f" %
(self.step_num, self.run_time))
def event_done_clicked(self, widget):
# Get step lines and screendump
lines = self.get_step_lines(self.steps_data_dir)
if lines == None:
return
# Get var values from user and write them to vars file
vars = {}
for line in lines.splitlines():
words = line.split()
if words and words[0] == "var":
varname = words[1]
if varname in self.vars.keys():
val = self.vars[varname]
elif varname in vars.keys():
val = vars[varname]
elif varname in self.params.keys():
val = self.params[varname]
vars[varname] = val
else:
val = self.inputdialog("$%s =" % varname, "Variable")
if val == None:
return
vars[varname] = val
for varname in vars.keys():
self.vars_file.write("%s=%s\n" % (varname, vars[varname]))
self.vars.update(vars)
# Write step lines to file
self.steps_file.write("# " + "-" * 32 + "\n")
self.steps_file.write(lines)
# Flush buffers of both files
self.steps_file.flush()
self.vars_file.flush()
# Remember the current time
self.time_when_done_clicked = time.time()
# Switch to run mode
self.switch_to_run_mode()
# Send commands to VM
for line in lines.splitlines():
words = line.split()
if not words:
continue
elif words[0] == "key":
self.vm.send_key(words[1])
elif words[0] == "var":
val = self.vars.get(words[1])
if not val:
continue
self.vm.send_string(val)
elif words[0] == "mousemove":
self.vm.monitor.mouse_move(-8000, -8000)
time.sleep(0.5)
self.vm.monitor.mouse_move(words[1], words[2])
time.sleep(0.5)
elif words[0] == "mouseclick":
self.vm.monitor.mouse_button(words[1])
time.sleep(0.1)
self.vm.monitor.mouse_button(0)
# Remember the current time
self.time_when_actions_completed = time.time()
# Move on to next step
self.step_num += 1
def event_capture_clicked(self, widget):
self.message("Mouse actions disabled (for now).", "Sorry")
return
self.image_width_backup = self.image_width
self.image_height_backup = self.image_height
self.image_data_backup = self.image_data
gtk.gdk.pointer_grab(self.event_box.window, False,
gtk.gdk.BUTTON_PRESS_MASK |
gtk.gdk.BUTTON_RELEASE_MASK)
# Create empty cursor
pix = gtk.gdk.Pixmap(self.event_box.window, 1, 1, 1)
color = gtk.gdk.Color()
cursor = gtk.gdk.Cursor(pix, pix, color, color, 0, 0)
self.event_box.window.set_cursor(cursor)
gtk.gdk.display_get_default().warp_pointer(gtk.gdk.screen_get_default(),
self.prev_x, self.prev_y)
self.redirect_event_box_input(
self.event_capture_button_press,
self.event_capture_button_release,
self.event_capture_scroll)
self.redirect_timer(10, self.update_capture)
self.vm.monitor.cmd("cont")
# Events in mouse capture mode
def update_capture(self):
self.redirect_timer()
(screen, x, y, flags) = gtk.gdk.display_get_default().get_pointer()
self.mouse_click_coords[0] = int(x * self.spin_sensitivity.get_value())
self.mouse_click_coords[1] = int(y * self.spin_sensitivity.get_value())
delay = self.spin_latency.get_value() / 1000
if (x, y) != (self.prev_x, self.prev_y):
self.vm.monitor.mouse_move(-8000, -8000)
time.sleep(delay)
self.vm.monitor.mouse_move(self.mouse_click_coords[0],
self.mouse_click_coords[1])
time.sleep(delay)
self.prev_x = x
self.prev_y = y
if os.path.exists(self.screendump_filename):
os.unlink(self.screendump_filename)
try:
self.vm.monitor.screendump(self.screendump_filename, debug=False)
except kvm_monitor.MonitorError, e:
logging.warn(e)
else:
self.set_image_from_file(self.screendump_filename)
self.redirect_timer(int(self.spin_latency.get_value()),
self.update_capture)
return True
def event_capture_button_press(self, widget,event):
pass
def event_capture_button_release(self, widget,event):
gtk.gdk.pointer_ungrab()
self.event_box.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.CROSSHAIR))
self.redirect_event_box_input(
self.event_button_press,
self.event_button_release,
None,
None,
self.event_expose)
self.redirect_timer()
self.vm.monitor.cmd("stop")
self.mouse_click_captured = True
self.mouse_click_button = event.button
self.set_image(self.image_width_backup, self.image_height_backup,
self.image_data_backup)
self.check_mousemove.set_sensitive(True)
self.check_mouseclick.set_sensitive(True)
self.check_mousemove.set_active(True)
self.check_mouseclick.set_active(True)
self.update_mouse_click_info()
def event_capture_scroll(self, widget, event):
if event.direction == gtk.gdk.SCROLL_UP:
direction = 1
else:
direction = -1
self.spin_sensitivity.set_value(self.spin_sensitivity.get_value() +
direction)
def run_stepmaker(test, params, env):
vm = env.get_vm(params.get("main_vm"))
if not vm:
raise error.TestError("VM object not found in environment")
if not vm.is_alive():
raise error.TestError("VM seems to be dead; Step Maker requires a"
" living VM")
steps_filename = params.get("steps")
if not steps_filename:
raise error.TestError("Steps filename not specified")
steps_filename = virt_utils.get_path(test.bindir, steps_filename)
if os.path.exists(steps_filename):
raise error.TestError("Steps file %s already exists" % steps_filename)
StepMaker(vm, steps_filename, test.debugdir, params)
gtk.main()
|
Tilo15/PhotoFiddle2 | refs/heads/master | PF2/Sounds.py | 1 | import subprocess
import threading
class SystemSounds:
@staticmethod
def play_sound(id):
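        # Fire-and-forget: run the blocking player on a background thread so
        # the caller (typically the UI) never blocks.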
threading.Thread(target=SystemSounds.play_sound_blocking, args=(id,)).start()
@staticmethod
def play_sound_blocking(id):
subprocess.call(['/usr/bin/canberra-gtk-play','--id',id])
@staticmethod
def window_attention():
SystemSounds.play_sound("window-attention")
@staticmethod
def complete():
SystemSounds.play_sound("complete") |
demarle/VTK | refs/heads/master | ThirdParty/Twisted/twisted/news/news.py | 57 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Maintainer: Jp Calderone
"""
from twisted.news import nntp
from twisted.internet import protocol, reactor
import time
class NNTPFactory(protocol.ServerFactory):
"""A factory for NNTP server protocols."""
protocol = nntp.NNTPServer
def __init__(self, backend):
self.backend = backend
def buildProtocol(self, connection):
p = self.protocol()
p.factory = self
return p
class UsenetClientFactory(protocol.ClientFactory):
def __init__(self, groups, storage):
self.lastChecks = {}
self.groups = groups
self.storage = storage
def clientConnectionLost(self, connector, reason):
pass
def clientConnectionFailed(self, connector, reason):
print 'Connection failed: ', reason
def updateChecks(self, addr):
self.lastChecks[addr] = time.mktime(time.gmtime())
def buildProtocol(self, addr):
last = self.lastChecks.setdefault(addr, time.mktime(time.gmtime()) - (60 * 60 * 24 * 7))
p = nntp.UsenetClientProtocol(self.groups, last, self.storage)
p.factory = self
return p
# XXX - Maybe this inheritance doesn't make so much sense?
class UsenetServerFactory(NNTPFactory):
"""A factory for NNTP Usenet server protocols."""
protocol = nntp.NNTPServer
def __init__(self, backend, remoteHosts = None, updatePeriod = 60):
NNTPFactory.__init__(self, backend)
self.updatePeriod = updatePeriod
self.remoteHosts = remoteHosts or []
self.clientFactory = UsenetClientFactory(self.remoteHosts, self.backend)
def startFactory(self):
self._updateCall = reactor.callLater(0, self.syncWithRemotes)
def stopFactory(self):
if self._updateCall:
self._updateCall.cancel()
self._updateCall = None
def buildProtocol(self, connection):
p = self.protocol()
p.factory = self
return p
def syncWithRemotes(self):
for remote in self.remoteHosts:
reactor.connectTCP(remote, 119, self.clientFactory)
self._updateCall = reactor.callLater(self.updatePeriod, self.syncWithRemotes)
# backwards compatibility
Factory = UsenetServerFactory
|
elbermu/cerux_kernel-touchwiz | refs/heads/master | tools/perf/scripts/python/sctop.py | 11180 | # system call top
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
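#
# Example invocation (illustrative): refresh every 5 seconds for all comms:
#   perf script -s sctop.py 5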
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
syscalls = autodict()
def trace_begin():
thread.start_new_thread(print_syscall_totals, (interval,))
pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40s %10d\n" % (syscall_name(id), val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
|
blueskycoco/sq-linux | refs/heads/master | tools/perf/scripts/python/sctop.py | 11180 | # system call top
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
syscalls = autodict()
def trace_begin():
thread.start_new_thread(print_syscall_totals, (interval,))
pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40s %10d\n" % (syscall_name(id), val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
|
ezralanglois/arachnid | refs/heads/master | arachnid/snippets/image/interpolate_volume.py | 1 | ''' Interporate a volume
Download to edit and run: :download:`interpolate_volume.py <../../arachnid/snippets/image/interpolate_volume.py>`
To run:
.. sourcecode:: sh
$ python interpolate_volume.py
.. literalinclude:: ../../arachnid/snippets/image/interpolate_volume.py
:language: python
:lines: 16-
:linenos:
'''
import sys
from arachnid.core.image import ndimage_file
from arachnid.core.image import ndimage_interpolate
if __name__ == '__main__':
# Parameters
input_file = sys.argv[1]
bin_factor = float(sys.argv[2])
output_file = sys.argv[3]
img=ndimage_file.read_image(input_file)
img=ndimage_interpolate.interpolate_bilinear(img, bin_factor)
ndimage_file.write_image(output_file, img) |
kenohori/IfcOpenShell_CGAL | refs/heads/cgal | src/ifcexpressparser/nodes.py | 7 | ###############################################################################
# #
# This file is part of IfcOpenShell. #
# #
# IfcOpenShell is free software: you can redistribute it and/or modify #
# it under the terms of the Lesser GNU General Public License as published by #
# the Free Software Foundation, either version 3.0 of the License, or #
# (at your option) any later version. #
# #
# IfcOpenShell is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# Lesser GNU General Public License for more details. #
# #
# You should have received a copy of the Lesser GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
import string
import collections
class Node:
def __init__(self, tokens = None):
self.tokens = tokens or []
self.init()
def tokens_of_type(self, cls):
return [t for t in self.tokens if isinstance(t, cls)]
def single_token_of_type(self, cls, k = None, v = None):
ts = [t for t in self.tokens if isinstance(t, cls) and (k is None or getattr(t, k) == v)]
return ts[0] if len(ts) == 1 else None
class TypeDeclaration(Node):
name = property(lambda self: self.tokens[1])
type = property(lambda self: self.tokens[3])
def init(self):
assert self.tokens[0] == 'type'
assert isinstance(self.type, UnderlyingType)
def __repr__(self):
return "%s = TypeDeclaration(%s)" % (self.name, self.type)
class EntityDeclaration(Node):
name = property(lambda self: self.tokens[1])
attributes = property(lambda self: self.tokens_of_type(ExplicitAttribute))
def init(self):
assert self.tokens[0] == 'entity'
s = self.single_token_of_type(SubtypeExpression)
self.inverse = self.single_token_of_type(AttributeList, 'type', 'inverse')
self.derive = self.single_token_of_type(AttributeList, 'type', 'derive')
self.supertypes = s.types if s else []
def __repr__(self):
builder = ""
builder += "Entity(%s)" % (self.name)
if len(self.supertypes):
builder += "\n Supertypes: %s"%(",".join(self.supertypes))
if len(self.attributes):
builder += "\n Attributes: %s"%("".join(["\n %s"%a for a in self.attributes]))
if self.derive:
builder += "\n Derive:"
builder += str(self.derive)
if self.inverse:
builder += "\n Inverse:"
builder += str(self.inverse)
builder += "\n"
return builder
class UnderlyingType(Node):
type = property(lambda self: self.tokens[0])
def init(self):
pass
def __repr__(self):
return repr(self.type)
class EnumerationType(Node):
type = property(lambda self: self.tokens[0])
values = property(lambda self: self.tokens[3::2])
def init(self):
assert self.type == 'enumeration'
def __repr__(self):
return ",".join(self.values)
class AggregationType(Node):
aggregate_type = property(lambda self: self.tokens[0])
bounds = property(lambda self: None if self.tokens[1] == 'of' else self.tokens[1])
type = property(lambda self: self.tokens[-1])
def init(self):
assert self.bounds is None or isinstance(self.bounds, BoundSpecification)
def __repr__(self):
return "%s%s of %s"%(self.aggregate_type, self.bounds, self.type)
class SelectType(Node):
type = property(lambda self: self.tokens[0])
values = property(lambda self: self.tokens[2::2])
def init(self):
assert self.type == 'select'
def __repr__(self):
return ",".join(self.values)
class SubSuperTypeExpression(Node):
type = property(lambda self: self.tokens[0])
types = property(lambda self: self.tokens[3::2])
def init(self):
assert self.type == self.class_type
class SubtypeExpression(SubSuperTypeExpression):
class_type = 'subtype'
class AttributeList(Node):
elements = property(lambda self: self.tokens[1:])
def __init__(self, ty, toks):
self.type = ty
Node.__init__(self, toks)
def init(self):
assert self.type == self.tokens[0]
def __repr__(self):
return "".join(["\n %s"%s for s in self.elements])
class InverseAttribute(Node):
name = property(lambda self: self.tokens[0])
type = property(lambda self: self.tokens[2])
bounds = property(lambda self: None if len(self.tokens) != 9 else self.tokens[3])
entity = property(lambda self: self.tokens[-4])
attribute = property(lambda self: self.tokens[-2])
def init(self):
assert self.bounds is None or isinstance(self.bounds, BoundSpecification)
def __repr__(self):
return "%s = %s.%s (%s%s)"%(self.name, self.entity, self.attribute, self.type, self.bounds or "")
class DerivedAttribute(Node):
def init(self):
name_index = list(self.tokens).index(':') - 1
self.name = self.tokens[name_index]
def __repr__(self):
return str(self.name)
class BinaryType(Node):
def init(self):
pass
def __repr__(self):
return "binary"
class BoundSpecification(Node):
lower = property(lambda self: self.tokens[1])
upper = property(lambda self: self.tokens[3])
def init(self):
# assert self.lower in string.digits or self.lower == '?'
# assert self.upper in string.digits or self.upper == '?'
pass
def __repr__(self):
return "[%s:%s]"%(self.lower, self.upper)
class ExplicitAttribute(Node):
name = property(lambda self: self.tokens[0])
type = property(lambda self: self.tokens[-2])
optional = property(lambda self: len(self.tokens) == 5 and self.tokens[-3] == 'optional')
def init(self):
# NB: This assumes a single name per attribute
# definition, which is not necessarily the case.
if self.tokens[0] == "self":
i = list(self.tokens).index(":")
self.tokens = self.tokens[i-1:]
assert self.tokens[1] == ':'
def __repr__(self):
return "%s : %s%s" % (self.name, self.type, " ?" if self.optional else "")
class WidthSpec(Node):
def init(self):
if self.tokens[-1] == "fixed":
self.tokens[-1:] = []
assert (self.tokens[0], self.tokens[-1]) == ("(", ")")
self.width = int("".join(self.tokens[1:-1]))
class StringType(Node):
def init(self):
pass
def __repr__(self):
return "string"
|
bigswitch/nova | refs/heads/master | nova/api/openstack/versioned_method.py | 97 | # Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class VersionedMethod(object):
def __init__(self, name, start_version, end_version, func):
"""Versioning information for a single method
@name: Name of the method
@start_version: Minimum acceptable version
        @end_version: Maximum acceptable version
@func: Method to call
Minimum and maximums are inclusive
"""
self.name = name
self.start_version = start_version
self.end_version = end_version
self.func = func
def __str__(self):
return ("Version Method %s: min: %s, max: %s"
% (self.name, self.start_version, self.end_version))
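
# Illustrative sketch (not part of the original module): Nova normally passes
# APIVersionRequest objects as the version bounds, but plain strings are
# enough to show the bookkeeping this class does.
if __name__ == '__main__':
    method = VersionedMethod('index', '2.1', '2.25', func=None)
    print(method)  # Version Method index: min: 2.1, max: 2.25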
|
RealImpactAnalytics/airflow | refs/heads/master | tests/task/__init__.py | 23 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# flake8: noqa
from __future__ import absolute_import
from .task_runner import *
|
natanielruiz/android-yolo | refs/heads/master | jni-build/jni/include/tensorflow/contrib/distributions/python/ops/distribution_util.py | 3 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for probability distributions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import logging_ops
from tensorflow.python.ops import math_ops
def assert_close(
x, y, data=None, summarize=None, message=None, name="assert_close"):
"""Assert that that x and y are within machine epsilon of each other.
Args:
x: Numeric `Tensor`
y: Numeric `Tensor`
data: The tensors to print out if the condition is `False`. Defaults to
error message and first few entries of `x` and `y`.
summarize: Print this many entries of each tensor.
message: A string to prefix to the default message.
name: A name for this operation (optional).
Returns:
Op raising `InvalidArgumentError` if |x - y| > machine epsilon.
"""
message = message or ""
x = ops.convert_to_tensor(x, name="x")
y = ops.convert_to_tensor(y, name="y")
if x.dtype.is_integer:
return check_ops.assert_equal(
x, y, data=data, summarize=summarize, message=message, name=name)
with ops.op_scope([x, y, data], name, "assert_close"):
tol = np.finfo(x.dtype.as_numpy_dtype).resolution
if data is None:
data = [
message,
"Condition x ~= y did not hold element-wise: x = ", x.name, x, "y = ",
y.name, y
]
condition = math_ops.reduce_all(math_ops.less_equal(math_ops.abs(x-y), tol))
return logging_ops.Assert(
condition, data, summarize=summarize)
def assert_integer_form(
x, data=None, summarize=None, message=None, name="assert_integer_form"):
"""Assert that x has integer components (or floats equal to integers).
Args:
x: Numeric `Tensor`
data: The tensors to print out if the condition is `False`. Defaults to
error message and first few entries of `x` and `y`.
summarize: Print this many entries of each tensor.
message: A string to prefix to the default message.
name: A name for this operation (optional).
Returns:
Op raising `InvalidArgumentError` if round(x) != x.
"""
message = message or "x has non-integer components"
x = ops.convert_to_tensor(x, name="x")
  return check_ops.assert_equal(
      x, math_ops.round(x),
      data=data, summarize=summarize, message=message, name=name)
def get_logits_and_prob(
logits=None, p=None, multidimensional=False, validate_args=True, name=None):
"""Converts logits to probabilities and vice-versa, and returns both.
Args:
logits: Numeric `Tensor` representing log-odds.
p: Numeric `Tensor` representing probabilities.
multidimensional: Given `p` a [N1, N2, ... k] dimensional tensor,
whether the last dimension represents the probability between k classes.
This will additionally assert that the values in the last dimension
sum to one. If `False`, will instead assert that each value is in
`[0, 1]`.
validate_args: Whether to assert `0 <= p <= 1` if multidimensional is
`False`, otherwise that the last dimension of `p` sums to one.
name: A name for this operation (optional).
Returns:
Tuple with `logits` and `p`. If `p` has an entry that is `0` or `1`, then
the corresponding entry in the returned logits will be `-Inf` and `Inf`
respectively.
Raises:
ValueError: if neither `p` nor `logits` were passed in, or both were.
"""
if p is None and logits is None:
raise ValueError("Must pass p or logits.")
elif p is not None and logits is not None:
raise ValueError("Must pass either p or logits, not both.")
elif p is None:
with ops.op_scope([logits], name):
logits = array_ops.identity(logits, name="logits")
with ops.name_scope(name):
with ops.name_scope("p"):
p = math_ops.sigmoid(logits)
elif logits is None:
with ops.name_scope(name):
with ops.name_scope("p"):
p = array_ops.identity(p)
if validate_args:
one = constant_op.constant(1., p.dtype)
dependencies = [check_ops.assert_non_negative(p)]
if multidimensional:
dependencies += [assert_close(
math_ops.reduce_sum(p, reduction_indices=[-1]),
one, message="p does not sum to 1.")]
else:
dependencies += [check_ops.assert_less_equal(
p, one, message="p has components greater than 1.")]
p = control_flow_ops.with_dependencies(dependencies, p)
with ops.name_scope("logits"):
logits = math_ops.log(p) - math_ops.log(1. - p)
return (logits, p)
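# Illustrative check of the mapping above: p = 0.5 corresponds to logits =
# log(0.5) - log(0.5) = 0, and logits = 0 maps back to p = sigmoid(0) = 0.5.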
def log_combinations(n, counts, name="log_combinations"):
"""Multinomial coefficient.
Given `n` and `counts`, where `counts` has last dimension `k`, we compute
the multinomial coefficient as:
  ```n! / (prod_i n_i!)```
where `i` runs over all `k` classes.
Args:
n: Numeric `Tensor` broadcastable with `counts`. This represents `n`
outcomes.
counts: Numeric `Tensor` broadcastable with `n`. This represents counts
in `k` classes, where `k` is the last dimension of the tensor.
name: A name for this operation (optional).
Returns:
`Tensor` representing the multinomial coefficient between `n` and `counts`.
"""
# First a bit about the number of ways counts could have come in:
# E.g. if counts = [1, 2], then this is 3 choose 2.
# In general, this is (sum counts)! / sum(counts!)
# The sum should be along the last dimension of counts. This is the
# "distribution" dimension. Here n a priori represents the sum of counts.
with ops.op_scope([n, counts], name):
total_permutations = math_ops.lgamma(n + 1)
counts_factorial = math_ops.lgamma(counts + 1)
redundant_permutations = math_ops.reduce_sum(counts_factorial,
reduction_indices=[-1])
return total_permutations - redundant_permutations
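# Worked example (illustrative): for n = 3 and counts = [1, 2] the coefficient
# is 3! / (1! * 2!) = 3, so log_combinations returns log(3) ~= 1.0986.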
|
AOSP-SGS2/android_kernel_samsung_galaxynote | refs/heads/ics | tools/perf/scripts/python/syscall-counts-by-pid.py | 944 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
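#
# Example invocation (illustrative): count only syscalls made by "bash":
#   perf trace -s syscall-counts-by-pid.py bash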
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
usage = "perf trace -s syscall-counts-by-pid.py [comm]\n";
for_comm = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
pass
def trace_end():
print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[common_comm][common_pid][id] += 1
except TypeError:
syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events by comm/pid:\n\n",
print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id, val in sorted(syscalls[comm][pid].iteritems(), \
key = lambda(k, v): (v, k), reverse = True):
print " %-38d %10d\n" % (id, val),
|
40223104/w16b_test | refs/heads/master | static/Brython3.1.1-20150328-091302/Lib/multiprocessing/util.py | 696 | #
# Module providing various facilities to other parts of the package
#
# multiprocessing/util.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
import sys
import functools
import os
import itertools
import weakref
import atexit
import threading        # we want threading to install its
# cleanup function before multiprocessing does
from subprocess import _args_from_interpreter_flags
from multiprocessing.process import current_process, active_children
__all__ = [
'sub_debug', 'debug', 'info', 'sub_warning', 'get_logger',
'log_to_stderr', 'get_temp_dir', 'register_after_fork',
'is_exiting', 'Finalize', 'ForkAwareThreadLock', 'ForkAwareLocal',
'SUBDEBUG', 'SUBWARNING',
]
#
# Logging
#
NOTSET = 0
SUBDEBUG = 5
DEBUG = 10
INFO = 20
SUBWARNING = 25
LOGGER_NAME = 'multiprocessing'
DEFAULT_LOGGING_FORMAT = '[%(levelname)s/%(processName)s] %(message)s'
_logger = None
_log_to_stderr = False
def sub_debug(msg, *args):
if _logger:
_logger.log(SUBDEBUG, msg, *args)
def debug(msg, *args):
if _logger:
_logger.log(DEBUG, msg, *args)
def info(msg, *args):
if _logger:
_logger.log(INFO, msg, *args)
def sub_warning(msg, *args):
if _logger:
_logger.log(SUBWARNING, msg, *args)
def get_logger():
'''
Returns logger used by multiprocessing
'''
global _logger
import logging
logging._acquireLock()
try:
if not _logger:
_logger = logging.getLogger(LOGGER_NAME)
_logger.propagate = 0
logging.addLevelName(SUBDEBUG, 'SUBDEBUG')
logging.addLevelName(SUBWARNING, 'SUBWARNING')
# XXX multiprocessing should cleanup before logging
if hasattr(atexit, 'unregister'):
atexit.unregister(_exit_function)
atexit.register(_exit_function)
else:
atexit._exithandlers.remove((_exit_function, (), {}))
atexit._exithandlers.append((_exit_function, (), {}))
finally:
logging._releaseLock()
return _logger
def log_to_stderr(level=None):
'''
Turn on logging and add a handler which prints to stderr
'''
global _log_to_stderr
import logging
logger = get_logger()
formatter = logging.Formatter(DEFAULT_LOGGING_FORMAT)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logger.addHandler(handler)
if level:
logger.setLevel(level)
_log_to_stderr = True
return _logger
#
# Function returning a temp directory which will be removed on exit
#
def get_temp_dir():
# get name of a temp directory which will be automatically cleaned up
if current_process()._tempdir is None:
import shutil, tempfile
tempdir = tempfile.mkdtemp(prefix='pymp-')
info('created temp directory %s', tempdir)
Finalize(None, shutil.rmtree, args=[tempdir], exitpriority=-100)
current_process()._tempdir = tempdir
return current_process()._tempdir
#
# Support for reinitialization of objects when bootstrapping a child process
#
_afterfork_registry = weakref.WeakValueDictionary()
_afterfork_counter = itertools.count()
def _run_after_forkers():
items = list(_afterfork_registry.items())
items.sort()
for (index, ident, func), obj in items:
try:
func(obj)
except Exception as e:
info('after forker raised exception %s', e)
def register_after_fork(obj, func):
_afterfork_registry[(next(_afterfork_counter), id(obj), func)] = obj
#
# Finalization using weakrefs
#
_finalizer_registry = {}
_finalizer_counter = itertools.count()
class Finalize(object):
'''
Class which supports object finalization using weakrefs
'''
def __init__(self, obj, callback, args=(), kwargs=None, exitpriority=None):
assert exitpriority is None or type(exitpriority) is int
if obj is not None:
self._weakref = weakref.ref(obj, self)
else:
assert exitpriority is not None
self._callback = callback
self._args = args
self._kwargs = kwargs or {}
self._key = (exitpriority, next(_finalizer_counter))
self._pid = os.getpid()
_finalizer_registry[self._key] = self
def __call__(self, wr=None,
# Need to bind these locally because the globals can have
# been cleared at shutdown
_finalizer_registry=_finalizer_registry,
sub_debug=sub_debug, getpid=os.getpid):
'''
Run the callback unless it has already been called or cancelled
'''
try:
del _finalizer_registry[self._key]
except KeyError:
sub_debug('finalizer no longer registered')
else:
if self._pid != getpid():
sub_debug('finalizer ignored because different process')
res = None
else:
sub_debug('finalizer calling %s with args %s and kwargs %s',
self._callback, self._args, self._kwargs)
res = self._callback(*self._args, **self._kwargs)
self._weakref = self._callback = self._args = \
self._kwargs = self._key = None
return res
def cancel(self):
'''
Cancel finalization of the object
'''
try:
del _finalizer_registry[self._key]
except KeyError:
pass
else:
self._weakref = self._callback = self._args = \
self._kwargs = self._key = None
def still_active(self):
'''
Return whether this finalizer is still waiting to invoke callback
'''
return self._key in _finalizer_registry
def __repr__(self):
try:
obj = self._weakref()
except (AttributeError, TypeError):
obj = None
if obj is None:
return '<Finalize object, dead>'
x = '<Finalize object, callback=%s' % \
getattr(self._callback, '__name__', self._callback)
if self._args:
x += ', args=' + str(self._args)
if self._kwargs:
x += ', kwargs=' + str(self._kwargs)
if self._key[0] is not None:
            x += ', exitpriority=' + str(self._key[0])
return x + '>'
def _run_finalizers(minpriority=None):
'''
Run all finalizers whose exit priority is not None and at least minpriority
Finalizers with highest priority are called first; finalizers with
the same priority will be called in reverse order of creation.
'''
if _finalizer_registry is None:
# This function may be called after this module's globals are
# destroyed. See the _exit_function function in this module for more
# notes.
return
if minpriority is None:
f = lambda p : p[0][0] is not None
else:
f = lambda p : p[0][0] is not None and p[0][0] >= minpriority
items = [x for x in list(_finalizer_registry.items()) if f(x)]
items.sort(reverse=True)
for key, finalizer in items:
sub_debug('calling %s', finalizer)
try:
finalizer()
except Exception:
import traceback
traceback.print_exc()
if minpriority is None:
_finalizer_registry.clear()
#
# Clean up on exit
#
def is_exiting():
'''
Returns true if the process is shutting down
'''
return _exiting or _exiting is None
_exiting = False
def _exit_function(info=info, debug=debug, _run_finalizers=_run_finalizers,
active_children=active_children,
current_process=current_process):
# We hold on to references to functions in the arglist due to the
# situation described below, where this function is called after this
# module's globals are destroyed.
global _exiting
if not _exiting:
_exiting = True
info('process shutting down')
debug('running all "atexit" finalizers with priority >= 0')
_run_finalizers(0)
if current_process() is not None:
# We check if the current process is None here because if
# it's None, any call to ``active_children()`` will raise
# an AttributeError (active_children winds up trying to
# get attributes from util._current_process). One
# situation where this can happen is if someone has
# manipulated sys.modules, causing this module to be
# garbage collected. The destructor for the module type
# then replaces all values in the module dict with None.
# For instance, after setuptools runs a test it replaces
# sys.modules with a copy created earlier. See issues
# #9775 and #15881. Also related: #4106, #9205, and
# #9207.
for p in active_children():
if p._daemonic:
info('calling terminate() for daemon %s', p.name)
p._popen.terminate()
for p in active_children():
info('calling join() for process %s', p.name)
p.join()
debug('running the remaining "atexit" finalizers')
_run_finalizers()
atexit.register(_exit_function)
#
# Some fork aware types
#
class ForkAwareThreadLock(object):
def __init__(self):
self._reset()
register_after_fork(self, ForkAwareThreadLock._reset)
def _reset(self):
self._lock = threading.Lock()
self.acquire = self._lock.acquire
self.release = self._lock.release
class ForkAwareLocal(threading.local):
def __init__(self):
register_after_fork(self, lambda obj : obj.__dict__.clear())
def __reduce__(self):
return type(self), ()
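
# Illustrative usage sketch (not part of the original module): a Finalize
# callback runs when its referent is garbage collected or at shutdown.
if __name__ == '__main__':
    class _Demo(object):
        pass
    _obj = _Demo()
    Finalize(_obj, sys.stdout.write, args=('demo object finalized\n',))
    del _obj  # the weakref dies and the callback fires immediately in CPython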
|
Tatsh/youtube-dl | refs/heads/master | youtube_dl/extractor/azmedien.py | 4 | # coding: utf-8
from __future__ import unicode_literals
import json
import re
from .common import InfoExtractor
from .kaltura import KalturaIE
class AZMedienIE(InfoExtractor):
IE_DESC = 'AZ Medien videos'
_VALID_URL = r'''(?x)
https?://
(?:www\.)?
(?P<host>
telezueri\.ch|
telebaern\.tv|
telem1\.ch
)/
[^/]+/
(?P<id>
[^/]+-(?P<article_id>\d+)
)
(?:
\#video=
(?P<kaltura_id>
[_0-9a-z]+
)
)?
'''
_TESTS = [{
'url': 'https://www.telezueri.ch/sonntalk/bundesrats-vakanzen-eu-rahmenabkommen-133214569',
'info_dict': {
'id': '1_anruz3wy',
'ext': 'mp4',
'title': 'Bundesrats-Vakanzen / EU-Rahmenabkommen',
'uploader_id': 'TVOnline',
'upload_date': '20180930',
'timestamp': 1538328802,
},
'params': {
'skip_download': True,
},
}, {
'url': 'https://www.telebaern.tv/telebaern-news/montag-1-oktober-2018-ganze-sendung-133531189#video=0_7xjo9lf1',
'only_matching': True
}]
_API_TEMPL = 'https://www.%s/api/pub/gql/%s/NewsArticleTeaser/a4016f65fe62b81dc6664dd9f4910e4ab40383be'
_PARTNER_ID = '1719221'
def _real_extract(self, url):
host, display_id, article_id, entry_id = re.match(self._VALID_URL, url).groups()
if not entry_id:
entry_id = self._download_json(
self._API_TEMPL % (host, host.split('.')[0]), display_id, query={
'variables': json.dumps({
'contextId': 'NewsArticle:' + article_id,
}),
})['data']['context']['mainAsset']['video']['kaltura']['kalturaId']
return self.url_result(
'kaltura:%s:%s' % (self._PARTNER_ID, entry_id),
ie=KalturaIE.ie_key(), video_id=entry_id)
|
jonathanunderwood/outsider | refs/heads/master | setup.py | 1 | # This file is part of Outsider.
#
# Outsider is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Outsider is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Outsider. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2015, 2016, 2017 Jonathan Underwood. All rights reserved.
"""Libraries and utilities for interfacing with the Blackstar ID range
of amplifiers.
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='outsider',
use_scm_version={
'write_to': "outsider/version.py",
},
setup_requires=['setuptools_scm'],
description='Utilities for interfacing with Blackstar ID amplifiers',
long_description=long_description,
url='https://github.com/jonathanunderwood/outsider',
author='Jonathan Underwood',
author_email='jonathan.underwood@gmail.com',
license='GPLv3+',
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
'Intended Audience :: End Users/Desktop',
'Topic :: Multimedia :: Sound/Audio',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
keywords='Blackstar Amplifiers',
packages=find_packages(exclude=['data', 'docs', 'tests*']),
install_requires=[
'PyUSB',
'PyQt5',
],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
# extras_require={
# 'dev': ['check-manifest'],
# 'test': ['coverage'],
# },
package_data={
'outsider': ['outsider.ui'],
},
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
#data_files=[('my_data', ['data/data_file'])],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={
'gui_scripts': [
'outsider = outsider.__main__:main',
],
},
)
|
whix/python | refs/heads/master | renzongxian/0007/0007.py | 40 | # Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-12-10
# Python 3.4
"""
第 0007 题:有个目录,里面是你自己写过的程序,统计一下你写过多少行代码。包括空行和注释,但是要分别列出来。
"""
import os
import sys
def code_lines(target_file):
# Declare returned values
total_lines = 0
empty_lines = 0
comment_lines = 0
file_object = open(target_file, 'r')
for line in file_object:
# Split the string
word_list = line.split()
        if word_list == []:
            empty_lines += 1
        elif word_list[0].startswith('#'):
            # Count lines whose first token starts with '#' (works for
            # both '# comment' and '#comment'); inline trailing comments
            # are not counted.
            comment_lines += 1
total_lines += 1
file_object.close()
return total_lines, empty_lines, comment_lines
if __name__ == "__main__":
t_lines = 0
e_lines = 0
c_lines = 0
if len(sys.argv) <= 1:
print("Need at least 1 parameter. Try to execute 'python 0007.py $dir_path'")
else:
for dir_path in sys.argv[1:]:
for file_name in os.listdir(dir_path):
file_path = os.path.join(dir_path, file_name)
t, e, c = code_lines(file_path)
t_lines += t
e_lines += e
c_lines += c
print("Total lines: %s. Empty lines: %s. Comment Lines: %s." % (t_lines, e_lines, c_lines)) |
nuxleus/cherokee-webserver | refs/heads/master | qa/131-PHP-zero-len-post+Redir.py | 8 | import os
from base import *
DIR1 = "131_post_zero_php_len1"
DIR2 = "131_post_zero_php_len2"
MAGIC = 'alvaro=alobbs.com'
FORBIDDEN = "Cherokee: The Open Source web server"
CONF = """
vserver!1!rule!1310!match = directory
vserver!1!rule!1310!match!directory = /%s
vserver!1!rule!1310!handler = redir
vserver!1!rule!1310!handler!rewrite!1!show = 0
vserver!1!rule!1310!handler!rewrite!1!substring = /%s/test.php
vserver!1!rule!1310!match!final = 1
""" % (DIR1, DIR2)
PHP_BASE = """
<?php
/* %s */
echo "POST alvaro=" . $_POST['alvaro'];
?>""" % (FORBIDDEN)
class Test (TestBase):
def __init__ (self):
TestBase.__init__ (self, __file__)
self.name = "PHP: Post with zero length (Redir)"
self.request = "POST /%s/ HTTP/1.0\r\n" % (DIR1) +\
"Content-type: application/x-www-form-urlencoded\r\n" +\
"Content-length: %d\r\n" % (len(MAGIC))
self.post = MAGIC
self.conf = CONF
self.expected_error = 200
self.expected_content = "POST %s" % (MAGIC)
def Prepare (self, www):
d = self.Mkdir (www, DIR2)
self.WriteFile (d, "test.php", 0755, PHP_BASE)
def Precondition (self):
return os.path.exists (look_for_php())
|
ajaxsys/dict-admin | refs/heads/master | pygments/styles/emacs.py | 75 | # -*- coding: utf-8 -*-
"""
pygments.styles.emacs
~~~~~~~~~~~~~~~~~~~~~
A highlighting style for Pygments, inspired by Emacs.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class EmacsStyle(Style):
"""
The default style (inspired by Emacs 22).
"""
background_color = "#f8f8f8"
default_style = ""
styles = {
Whitespace: "#bbbbbb",
Comment: "italic #008800",
Comment.Preproc: "noitalic",
Comment.Special: "noitalic bold",
Keyword: "bold #AA22FF",
Keyword.Pseudo: "nobold",
Keyword.Type: "bold #00BB00",
Operator: "#666666",
Operator.Word: "bold #AA22FF",
Name.Builtin: "#AA22FF",
Name.Function: "#00A000",
Name.Class: "#0000FF",
Name.Namespace: "bold #0000FF",
Name.Exception: "bold #D2413A",
Name.Variable: "#B8860B",
Name.Constant: "#880000",
Name.Label: "#A0A000",
Name.Entity: "bold #999999",
Name.Attribute: "#BB4444",
Name.Tag: "bold #008000",
Name.Decorator: "#AA22FF",
String: "#BB4444",
String.Doc: "italic",
String.Interpol: "bold #BB6688",
String.Escape: "bold #BB6622",
String.Regex: "#BB6688",
String.Symbol: "#B8860B",
String.Other: "#008000",
Number: "#666666",
Generic.Heading: "bold #000080",
Generic.Subheading: "bold #800080",
Generic.Deleted: "#A00000",
Generic.Inserted: "#00A000",
Generic.Error: "#FF0000",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold #000080",
Generic.Output: "#888",
Generic.Traceback: "#04D",
Error: "border:#FF0000"
}
|
yury-s/v8-inspector | refs/heads/master | Source/chrome/build/android/pylib/gtest/__init__.py | 998 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
|
alrifqi/django | refs/heads/master | tests/order_with_respect_to/models.py | 165 | """
Tests for the order_with_respect_to Meta attribute.
"""
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class Question(models.Model):
text = models.CharField(max_length=200)
@python_2_unicode_compatible
class Answer(models.Model):
text = models.CharField(max_length=200)
question = models.ForeignKey(Question)
class Meta:
order_with_respect_to = 'question'
def __str__(self):
return six.text_type(self.text)
@python_2_unicode_compatible
class Post(models.Model):
title = models.CharField(max_length=200)
parent = models.ForeignKey("self", related_name="children", null=True)
class Meta:
order_with_respect_to = "parent"
def __str__(self):
return self.title
|
auduny/home-assistant | refs/heads/dev | homeassistant/components/yale_smart_alarm/alarm_control_panel.py | 7 | """Component for interacting with the Yale Smart Alarm System API."""
import logging
import voluptuous as vol
from homeassistant.components.alarm_control_panel import (
AlarmControlPanel, PLATFORM_SCHEMA)
from homeassistant.const import (
CONF_PASSWORD, CONF_USERNAME, CONF_NAME,
STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED)
import homeassistant.helpers.config_validation as cv
CONF_AREA_ID = 'area_id'
DEFAULT_NAME = 'Yale Smart Alarm'
DEFAULT_AREA_ID = '1'
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_AREA_ID, default=DEFAULT_AREA_ID): cv.string,
})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the alarm platform."""
name = config[CONF_NAME]
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
area_id = config[CONF_AREA_ID]
from yalesmartalarmclient.client import (
YaleSmartAlarmClient, AuthenticationError)
try:
client = YaleSmartAlarmClient(username, password, area_id)
except AuthenticationError:
_LOGGER.error("Authentication failed. Check credentials")
return
add_entities([YaleAlarmDevice(name, client)], True)
class YaleAlarmDevice(AlarmControlPanel):
"""Represent a Yale Smart Alarm."""
def __init__(self, name, client):
"""Initialize the Yale Alarm Device."""
self._name = name
self._client = client
self._state = None
from yalesmartalarmclient.client import (YALE_STATE_DISARM,
YALE_STATE_ARM_PARTIAL,
YALE_STATE_ARM_FULL)
self._state_map = {
YALE_STATE_DISARM: STATE_ALARM_DISARMED,
YALE_STATE_ARM_PARTIAL: STATE_ALARM_ARMED_HOME,
YALE_STATE_ARM_FULL: STATE_ALARM_ARMED_AWAY
}
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
def update(self):
"""Return the state of the device."""
armed_status = self._client.get_armed_status()
self._state = self._state_map.get(armed_status)
def alarm_disarm(self, code=None):
"""Send disarm command."""
self._client.disarm()
def alarm_arm_home(self, code=None):
"""Send arm home command."""
self._client.arm_partial()
def alarm_arm_away(self, code=None):
"""Send arm away command."""
self._client.arm_full()
|
vikt0rs/oslo.db | refs/heads/master | tests/sqlalchemy/test_migrations.py | 3 | # Copyright 2010-2011 OpenStack Foundation
# Copyright 2012-2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslotest import base as test_base
from oslo.db.sqlalchemy import test_migrations as migrate
class TestWalkVersions(test_base.BaseTestCase, migrate.WalkVersionsMixin):
def setUp(self):
super(TestWalkVersions, self).setUp()
self.migration_api = mock.MagicMock()
self.engine = mock.MagicMock()
self.REPOSITORY = mock.MagicMock()
self.INIT_VERSION = 4
def test_migrate_up(self):
self.migration_api.db_version.return_value = 141
self._migrate_up(self.engine, 141)
self.migration_api.upgrade.assert_called_with(
self.engine, self.REPOSITORY, 141)
self.migration_api.db_version.assert_called_with(
self.engine, self.REPOSITORY)
def test_migrate_up_with_data(self):
test_value = {"a": 1, "b": 2}
self.migration_api.db_version.return_value = 141
self._pre_upgrade_141 = mock.MagicMock()
self._pre_upgrade_141.return_value = test_value
self._check_141 = mock.MagicMock()
self._migrate_up(self.engine, 141, True)
self._pre_upgrade_141.assert_called_with(self.engine)
self._check_141.assert_called_with(self.engine, test_value)
def test_migrate_down(self):
self.migration_api.db_version.return_value = 42
self.assertTrue(self._migrate_down(self.engine, 42))
self.migration_api.db_version.assert_called_with(
self.engine, self.REPOSITORY)
def test_migrate_down_not_implemented(self):
self.migration_api.downgrade.side_effect = NotImplementedError
self.assertFalse(self._migrate_down(self.engine, 42))
def test_migrate_down_with_data(self):
self._post_downgrade_043 = mock.MagicMock()
self.migration_api.db_version.return_value = 42
self._migrate_down(self.engine, 42, True)
self._post_downgrade_043.assert_called_with(self.engine)
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up')
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down')
def test_walk_versions_all_default(self, _migrate_up, _migrate_down):
self.REPOSITORY.latest = 20
self.migration_api.db_version.return_value = self.INIT_VERSION
self._walk_versions()
self.migration_api.version_control.assert_called_with(
None, self.REPOSITORY, self.INIT_VERSION)
self.migration_api.db_version.assert_called_with(
None, self.REPOSITORY)
versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
upgraded = [mock.call(None, v, with_data=True) for v in versions]
self.assertEqual(self._migrate_up.call_args_list, upgraded)
downgraded = [mock.call(None, v - 1) for v in reversed(versions)]
self.assertEqual(self._migrate_down.call_args_list, downgraded)
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up')
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down')
def test_walk_versions_all_true(self, _migrate_up, _migrate_down):
self.REPOSITORY.latest = 20
self.migration_api.db_version.return_value = self.INIT_VERSION
self._walk_versions(self.engine, snake_walk=True, downgrade=True)
versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
upgraded = []
for v in versions:
upgraded.append(mock.call(self.engine, v, with_data=True))
upgraded.append(mock.call(self.engine, v))
upgraded.extend(
[mock.call(self.engine, v) for v in reversed(versions)]
)
self.assertEqual(upgraded, self._migrate_up.call_args_list)
downgraded_1 = [
mock.call(self.engine, v - 1, with_data=True) for v in versions
]
downgraded_2 = []
for v in reversed(versions):
downgraded_2.append(mock.call(self.engine, v - 1))
downgraded_2.append(mock.call(self.engine, v - 1))
downgraded = downgraded_1 + downgraded_2
self.assertEqual(self._migrate_down.call_args_list, downgraded)
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up')
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down')
def test_walk_versions_true_false(self, _migrate_up, _migrate_down):
self.REPOSITORY.latest = 20
self.migration_api.db_version.return_value = self.INIT_VERSION
self._walk_versions(self.engine, snake_walk=True, downgrade=False)
versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
upgraded = []
for v in versions:
upgraded.append(mock.call(self.engine, v, with_data=True))
upgraded.append(mock.call(self.engine, v))
self.assertEqual(upgraded, self._migrate_up.call_args_list)
downgraded = [
mock.call(self.engine, v - 1, with_data=True) for v in versions
]
self.assertEqual(self._migrate_down.call_args_list, downgraded)
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up')
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down')
def test_walk_versions_all_false(self, _migrate_up, _migrate_down):
self.REPOSITORY.latest = 20
self.migration_api.db_version.return_value = self.INIT_VERSION
self._walk_versions(self.engine, snake_walk=False, downgrade=False)
versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
upgraded = [
mock.call(self.engine, v, with_data=True) for v in versions
]
self.assertEqual(upgraded, self._migrate_up.call_args_list)
|
flyfei/python-for-android | refs/heads/master | python-modules/twisted/twisted/conch/test/test_window.py | 78 |
"""
Tests for the insults windowing module, L{twisted.conch.insults.window}.
"""
from twisted.trial.unittest import TestCase
from twisted.conch.insults.window import TopWindow
class TopWindowTests(TestCase):
"""
Tests for L{TopWindow}, the root window container class.
"""
def test_paintScheduling(self):
"""
Verify that L{TopWindow.repaint} schedules an actual paint to occur
using the scheduling object passed to its initializer.
"""
paints = []
scheduled = []
root = TopWindow(lambda: paints.append(None), scheduled.append)
# Nothing should have happened yet.
self.assertEqual(paints, [])
self.assertEqual(scheduled, [])
# Cause a paint to be scheduled.
root.repaint()
self.assertEqual(paints, [])
self.assertEqual(len(scheduled), 1)
# Do another one to verify nothing else happens as long as the previous
# one is still pending.
root.repaint()
self.assertEqual(paints, [])
self.assertEqual(len(scheduled), 1)
# Run the actual paint call.
scheduled.pop()()
self.assertEqual(len(paints), 1)
self.assertEqual(scheduled, [])
# Do one more to verify that now that the previous one is finished
# future paints will succeed.
root.repaint()
self.assertEqual(len(paints), 1)
self.assertEqual(len(scheduled), 1)
|
roselleebarle04/django | refs/heads/master | tests/model_validation/tests.py | 292 | from django.core import management
from django.core.checks import Error, run_checks
from django.db.models.signals import post_init
from django.test import SimpleTestCase
from django.test.utils import override_settings
from django.utils import six
class OnPostInit(object):
def __call__(self, **kwargs):
pass
def on_post_init(**kwargs):
pass
@override_settings(
INSTALLED_APPS=['django.contrib.auth', 'django.contrib.contenttypes'],
SILENCED_SYSTEM_CHECKS=['fields.W342'], # ForeignKey(unique=True)
)
class ModelValidationTest(SimpleTestCase):
def test_models_validate(self):
# All our models should validate properly
# Validation Tests:
# * choices= Iterable of Iterables
# See: https://code.djangoproject.com/ticket/20430
# * related_name='+' doesn't clash with another '+'
# See: https://code.djangoproject.com/ticket/21375
management.call_command("check", stdout=six.StringIO())
def test_model_signal(self):
unresolved_references = post_init.unresolved_references.copy()
post_init.connect(on_post_init, sender='missing-app.Model')
post_init.connect(OnPostInit(), sender='missing-app.Model')
errors = run_checks()
expected = [
Error(
"The 'on_post_init' function was connected to the 'post_init' "
"signal with a lazy reference to the 'missing-app.Model' "
"sender, which has not been installed.",
hint=None,
obj='model_validation.tests',
id='signals.E001',
),
Error(
"An instance of the 'OnPostInit' class was connected to "
"the 'post_init' signal with a lazy reference to the "
"'missing-app.Model' sender, which has not been installed.",
hint=None,
obj='model_validation.tests',
id='signals.E001',
)
]
self.assertEqual(errors, expected)
post_init.unresolved_references = unresolved_references
|
cyberspace-kris/dnd5e | refs/heads/master | hitpoints.py | 1 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""HTML checkboxes for monster hit points."""
import argparse
import random
def roll(n, d):
"""Roll n d-sided dice."""
r = 0
for i in range(n):
r += random.randint(1, d)
return r
def parse(hitdice):
"""Parse hit dice string, e.g., 5d8+10."""
if "+" in hitdice:
a, m = [x.strip() for x in hitdice.split("+")]
n, d = [int(x.strip()) for x in a.split("d")]
modifier = int(m)
elif "-" in hitdice:
a, m = [x.strip() for x in hitdice.split("-")]
n, d = [int(x.strip()) for x in a.split("d")]
modifier = -int(m)
else:
n, d = [int(x.strip()) for x in hitdice.split("d")]
modifier = 0
return n, d, modifier
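# For example, parse("3d8+9") returns (3, 8, 9) and parse("2d6-1")
# returns (2, 6, -1).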
if __name__ == "__main__":
p = argparse.ArgumentParser(prog="hitpoints.py", description=__doc__)
p.add_argument("-n", "--number", dest="n", type=int, default=1,
help="number of creatures")
p.add_argument(dest="hitdice", nargs="?", type=str,
help="hitdice (3d8+9)")
    args = p.parse_args()
    if args.hitdice is None:
        p.error("a hitdice string such as 3d8+9 is required")
    n, d, modifier = parse(args.hitdice)
stats = []
for i in range(args.n):
hp = str(roll(n, d) + modifier)
stats.append(hp)
print(", ".join(stats))
|
marsop/machikoro | refs/heads/master | code/Player.py | 1 | from card import CardName
import abc
class Player(object):
__metaclass__ = abc.ABCMeta
def __init__(self, name):
self.name = name
self.playing_area = []
self.coins = 0
def buy_card(self, card):
self.coins -= card.cost
self.playing_area.append(card)
#print "player " + self.name + " has bought card " + card.card_name + " ("+ str(self.coins)+" coins left)"
def take_coins(self, amount):
self.coins += amount
#print "player " + self.name + " has now " + str(self.coins) + " coins"
def has(self, card_name):
result = [x for x in self.playing_area if x.card_name == card_name]
return len(result) != 0
    def has_all_gold_cards(self):
        return (self.has(CardName.TRAIN_STATION)
                and self.has(CardName.SHOPPING_MALL)
                and self.has(CardName.AMUSEMENT_PARK)
                and self.has(CardName.RADIO_TOWER))
@abc.abstractmethod
def choose_action(self, game):
"""Decides which action to take"""
return
|
veger/ansible | refs/heads/devel | lib/ansible/modules/cloud/digital_ocean/digital_ocean_floating_ip_facts.py | 81 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (C) 2017-18, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'
}
DOCUMENTATION = '''
---
module: digital_ocean_floating_ip_facts
short_description: DigitalOcean Floating IPs facts
description:
- This module can be used to fetch DigitalOcean Floating IPs facts.
version_added: "2.5"
author: "Patrick Marques (@pmarques)"
extends_documentation_fragment: digital_ocean.documentation
notes:
- Version 2 of DigitalOcean API is used.
requirements:
- "python >= 2.6"
'''
EXAMPLES = '''
- name: "Gather facts about all Floating IPs"
digital_ocean_floating_ip_facts:
register: result
- name: "List of current floating ips"
debug: var=result.floating_ips
'''
RETURN = '''
# Digital Ocean API info https://developers.digitalocean.com/documentation/v2/#floating-ips
floating_ips:
description: a DigitalOcean Floating IP resource
returned: success and no resource constraint
type: list
sample: [
{
"ip": "45.55.96.47",
"droplet": null,
"region": {
"name": "New York 3",
"slug": "nyc3",
"sizes": [
"512mb",
"1gb",
"2gb",
"4gb",
"8gb",
"16gb",
"32gb",
"48gb",
"64gb"
],
"features": [
"private_networking",
"backups",
"ipv6",
"metadata"
],
"available": true
},
"locked": false
}
]
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.digital_ocean import DigitalOceanHelper
from ansible.module_utils._text import to_native
def core(module):
rest = DigitalOceanHelper(module)
page = 1
has_next = True
floating_ips = []
status_code = None
while has_next or status_code != 200:
response = rest.get("floating_ips?page={0}&per_page=20".format(page))
status_code = response.status_code
# stop if any error during pagination
if status_code != 200:
break
page += 1
floating_ips.extend(response.json["floating_ips"])
has_next = "pages" in response.json["links"] and "next" in response.json["links"]["pages"]
if status_code == 200:
module.exit_json(changed=False, floating_ips=floating_ips)
else:
module.fail_json(msg="Error fetching facts [{0}: {1}]".format(
status_code, response.json["message"]))
def main():
module = AnsibleModule(
argument_spec=DigitalOceanHelper.digital_ocean_argument_spec()
)
try:
core(module)
except Exception as e:
module.fail_json(msg=to_native(e))
if __name__ == '__main__':
main()
|
maaruiz/Nominas2015ES | refs/heads/master | Procesos/Nomina/Editar/actualizar.py | 1 | """
Created on 04/10/2015
@author: miguelangel
"""
import locale
import time
from Funciones.funs import select_sql
from Funciones.Datos.nomina_dat import Nomina_Devengo
from Procesos.Nomina.Calcular.costes import CostesSegSocial
class Actualizar:
    '''
    Recalculates a payroll (nomina) record: updates earnings, deductions,
    employee and employer social-security contributions, and totals.
    '''
def __init__(self, nomina = 0):
'''
Constructor
'''
self.nomina_id = nomina
self.costes = CostesSegSocial(self.nomina_id)
self.anio = self.costes.bases.nomina.fecha_anio
self.mes = self.costes.bases.nomina.fecha_mes
self.dia = self.costes.bases.nomina.dia_nomina
self.listadevengos = self.costes.bases.nomina.lista_devengos_nomina()
self.listadeducciones = self.costes.bases.nomina.lista_deducciones_nomina()
self.listapextras = self.costes.bases.nomina.lista_pagas_extras()
self.fecha = self.costes.bases.nomina.actualiza_fecha()
self.ac_devengos()
self.ac_periodo()
self.ac_campo('tot_dias', self.costes.bases.nomina.dias_cotizados())
self.ac_campo('base_cc', self.costes.bases.base_segsocial)
self.ac_campo('base_irpf', self.costes.bases.base_irpf)
self.ac_campo('base_ppextra', self.costes.bases.base_ppextra)
self.ac_campo('base_dfgsfp', self.costes.bases.base_segsocial)
self.ac_campo('imp_remumes', self.costes.bases.base_remuneracion)
if self.costes.bases.nomina.contrato.con_prorrata_pextra:
self.ac_campo('imp_pextra', self.costes.bases.base_ppextra)
self.ac_deducciones()
self.ac_aporta_trabajador()
self.ac_aportacion_empresa()
self.ac_totales()
def ac_aporta_trabajador(self):
sql = ('UPDATE '
' nominas '
'SET '
' imp_aportatrab = ' + str(self.costes.total_aportacion) +
' '
'WHERE '
' idnomina = %s')
select_sql((sql, (self.nomina_id)))
def ac_aportacion_empresa(self):
sql = ( "update "
"nominas "
"set "
"tipo_cc_empresa = " + str(self.costes.tipo_ccomun_emp) + ", "
"tipo_dp_empresa = " + str(self.costes.tipo_desempleo_emp) + ", "
"tipo_fp_empresa = " + str(self.costes.tipo_fp_emp) + ", "
"tipo_fgs_empresa = " + str(self.costes.tipo_fogasa_emp) + ", "
"imp_cc_empresa = " + str(self.costes.cont_comun_empresa) + ", "
"imp_dp_empresa = " + str(self.costes.desempleo_empresa) + ", "
"imp_fp_empresa = " + str(self.costes.formacion_prof_emp) + ", "
"imp_fgs_empresa = " + str(self.costes.fogasa_emp) + " "
"where "
"nominas.idnomina = %s;")
select_sql((sql, (self.nomina_id)),1)
def ac_campo(self, campo, valor):
sql = ("UPDATE "
"nominas "
"SET " +
campo + " = " + unicode(valor) + " "
"WHERE "
"idnomina = " + unicode(self.nomina_id) + " ")
select_sql(sql)
def ac_deducciones(self):
self.total_deducciones = 0
for ded in self.listadeducciones:
deducciones = Nomina_Devengo(ded[0])
self.total_deducciones = self.total_deducciones + deducciones.deducido
def ac_devengos(self):
self.total_devengo = 0
self.pextra = 0
for dev in self.listadevengos:
devengo = Nomina_Devengo(dev[0])
self.total_devengo = self.total_devengo + devengo.devengado
self.pextra = self.pextra + devengo.paga_extra()
for pex in self.listapextras:
pextras = self.pextra
devpextra = Nomina_Devengo(pex[0])
pextras = pextras * devpextra.coef_pextra
cuantia = self.costes.bases.nomina.dias_cotizados()
devpextra.ac_campo('imp_cuantia', cuantia)
if not devpextra.es_para_pextra:
precio = pextras / cuantia
devpextra.ac_campo('imp_devengo', pextras)
elif devpextra.es_para_pextra:
precio = float(devpextra.importe) / float(self.costes.bases.calendario.diastotales)
devpextra.ac_campo('imp_devengo', round(cuantia * precio, 2))
self.total_devengo += round(cuantia * precio, 2)
devpextra.ac_campo('imp_precio', precio)
devpextra(pex[0])
if self.costes.bases.nomina.contrato.con_prorrata_pextra:
self.total_devengo = self.total_devengo + self.costes.bases.base_ppextra
self.ac_campo('imp_totdev', self.total_devengo)
def ac_periodo(self):
locale.setlocale(locale.LC_ALL, 'es_ES.UTF-8')
estemes = time.strftime("%B", time.strptime(str(self.mes) +
',' + str(self.anio), '%m,%Y'))
p = self.costes.bases.nomina.periodos
p = ("'" +unicode(p[0].day) + " al " + unicode(p[1].day) + " de " +
unicode(estemes) + " " + unicode(self.anio)+ "'")
self.ac_campo('periodo', p)
return p
def ac_totales(self):
        #----------------- Compute total earned (devengado) and total deductible
sql = ("SELECT "
"sum(imp_devengo), "
"sum(imp_deduccion) "
"FROM "
"nomina_devengos "
"WHERE "
"idnomina = %s;")
select_sql((sql, (self.nomina_id)))
sql = ("UPDATE "
"nominas "
"SET "
"imp_totdev = %s, "
"tot_deducir = %s "
"WHERE "
"idnomina = %s ")
select_sql((sql, (self.total_devengo, self.total_deducciones, self.nomina_id)))
|
Shraddha512/servo | refs/heads/master | components/script/dom/bindings/codegen/parser/tests/test_union.py | 134 | import WebIDL
import itertools
import string
# We'd like to use itertools.chain but it's 2.6 or higher.
def chain(*iterables):
# chain('ABC', 'DEF') --> A B C D E F
for it in iterables:
for element in it:
yield element
# We'd like to use itertools.combinations but it's 2.6 or higher.
def combinations(iterable, r):
# combinations('ABCD', 2) --> AB AC AD BC BD CD
# combinations(range(4), 3) --> 012 013 023 123
pool = tuple(iterable)
n = len(pool)
if r > n:
return
indices = range(r)
yield tuple(pool[i] for i in indices)
while True:
for i in reversed(range(r)):
if indices[i] != i + n - r:
break
else:
return
indices[i] += 1
for j in range(i+1, r):
indices[j] = indices[j-1] + 1
yield tuple(pool[i] for i in indices)
# We'd like to use itertools.combinations_with_replacement but it's 2.7 or
# higher.
def combinations_with_replacement(iterable, r):
# combinations_with_replacement('ABC', 2) --> AA AB AC BB BC CC
pool = tuple(iterable)
n = len(pool)
if not n and r:
return
indices = [0] * r
yield tuple(pool[i] for i in indices)
while True:
for i in reversed(range(r)):
if indices[i] != n - 1:
break
else:
return
indices[i:] = [indices[i] + 1] * (r - i)
yield tuple(pool[i] for i in indices)
def WebIDLTest(parser, harness):
types = ["float",
"double",
"short",
"unsigned short",
"long",
"unsigned long",
"long long",
"unsigned long long",
"boolean",
"byte",
"octet",
"DOMString",
#"sequence<float>",
"object",
"ArrayBuffer",
#"Date",
"TestInterface1",
"TestInterface2"]
testPre = """
interface TestInterface1 {
};
interface TestInterface2 {
};
"""
interface = testPre + """
interface PrepareForTest {
"""
for (i, type) in enumerate(types):
interface += string.Template("""
readonly attribute ${type} attr${i};
""").substitute(i=i, type=type)
interface += """
};
"""
parser.parse(interface)
results = parser.finish()
iface = results[2]
parser = parser.reset()
def typesAreDistinguishable(t):
return all(u[0].isDistinguishableFrom(u[1]) for u in combinations(t, 2))
def typesAreNotDistinguishable(t):
return any(not u[0].isDistinguishableFrom(u[1]) for u in combinations(t, 2))
def unionTypeName(t):
if len(t) > 2:
t[0:2] = [unionTypeName(t[0:2])]
return "(" + " or ".join(t) + ")"
# typeCombinations is an iterable of tuples containing the name of the type
# as a string and the parsed IDL type.
def unionTypes(typeCombinations, predicate):
for c in typeCombinations:
if predicate(t[1] for t in c):
yield unionTypeName([t[0] for t in c])
# We limit invalid union types with a union member type to the subset of 3
# types with one invalid combination.
# typeCombinations is an iterable of tuples containing the name of the type
# as a string and the parsed IDL type.
def invalidUnionWithUnion(typeCombinations):
for c in typeCombinations:
if (typesAreNotDistinguishable((c[0][1], c[1][1])) and
typesAreDistinguishable((c[1][1], c[2][1])) and
typesAreDistinguishable((c[0][1], c[2][1]))):
yield unionTypeName([t[0] for t in c])
# Create a list of tuples containing the name of the type as a string and
# the parsed IDL type.
types = zip(types, (a.type for a in iface.members))
validUnionTypes = chain(unionTypes(combinations(types, 2), typesAreDistinguishable),
unionTypes(combinations(types, 3), typesAreDistinguishable))
invalidUnionTypes = chain(unionTypes(combinations_with_replacement(types, 2), typesAreNotDistinguishable),
invalidUnionWithUnion(combinations(types, 3)))
interface = testPre + """
interface TestUnion {
"""
for (i, type) in enumerate(validUnionTypes):
interface += string.Template("""
void method${i}(${type} arg);
${type} returnMethod${i}();
attribute ${type} attr${i};
void arrayMethod${i}(${type}[] arg);
${type}[] arrayReturnMethod${i}();
attribute ${type}[] arrayAttr${i};
void optionalMethod${i}(${type}? arg);
""").substitute(i=i, type=type)
interface += """
};
"""
parser.parse(interface)
results = parser.finish()
parser = parser.reset()
for invalid in invalidUnionTypes:
interface = testPre + string.Template("""
interface TestUnion {
void method(${type} arg);
};
""").substitute(type=invalid)
threw = False
try:
parser.parse(interface)
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
parser = parser.reset()
|
eroicaleo/LearningPython | refs/heads/master | ch31/bound.py | 1 | #!/usr/bin/env python
class Spam:
def doit(self, message):
print(message)
object1 = Spam()
object1.doit('hello world!')
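# object1.doit is a bound method: the instance is packaged with the
# function, so no explicit instance is passed when it is called.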
x = object1.doit
x('hello world!')
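# Fetched through the class, doit is a plain function (an unbound method
# in 2.X): the instance must be passed explicitly.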
t = Spam.doit
t(object1, 'howdy!')
class Eggs:
def m1(self, n):
print(n)
def m2(self):
x = self.m1
x(42)
Eggs().m2()
class Selfless:
def __init__(self, data):
self.data = data
def selfless(args1, args2):
return args1 + args2
def normal(self, args1, args2):
return self.data + args1 + args2
X = Selfless(2)
print(X.normal(3, 4))
print(Selfless.normal(X, 3, 4))
print(Selfless.selfless(3, 4))
# The following two calls raise TypeErrors: X.selfless(3, 4) passes the
# instance as an implicit extra argument, and Selfless.normal(3, 4) is
# missing its instance argument.
# X.selfless(3, 4)
# Selfless.normal(3, 4)
class Number:
def __init__(self, base):
self.base = base
def double(self):
return self.base * 2
def triple(self):
return self.base * 3
x = Number(2)
y = Number(3)
z = Number(4)
print(x.double())
acts = [x.double, y.double, y.triple, z.double]
for act in acts:
print(act())
bound = x.double
print(bound.__self__, bound.__func__)
print(bound.__self__.base)
def square(arg):
return arg ** 2
class Sum:
def __init__(self, val):
self.val = val
def __call__(self, arg):
return self.val + arg
class Product:
def __init__(self, val):
self.val = val
def method(self, arg):
return self.val * arg
sobject = Sum(2)
pobject = Product(3)
actions = [square, sobject, pobject.method]
for act in actions:
print(act(5))
print(actions[-1](5))
print([act(5) for act in actions])
print(list(map(lambda act: act(5), actions)))
class Negate:
def __init__(self, val):
self.val = -val
def __repr__(self):
return str(self.val)
actions = [square, sobject, pobject.method, Negate]
print([act(5) for act in actions])
table = {act(5) : act for act in actions}
for (key, value) in table.items():
print('%2s => %s' % (key, value))
# print('{0:2} => {1}'.format(key, value))
|
hmoco/osf.io | refs/heads/develop | api_tests/search/serializers/test_serializers.py | 6 | from modularodm import Q
from nose.tools import * # flake8: noqa
from api.search.serializers import SearchSerializer
from api_tests import utils
from tests.base import DbTestCase
from osf_tests.factories import (
AuthUserFactory,
NodeFactory,
ProjectFactory,
)
from tests.utils import make_drf_request_with_version, mock_archive
from website.models import MetaSchema
from website.project.model import ensure_schemas
from website.project.metadata.schemas import LATEST_SCHEMA_VERSION
from website.search import search
class TestSearchSerializer(DbTestCase):
def setUp(self):
super(TestSearchSerializer, self).setUp()
self.user = AuthUserFactory()
self.project = ProjectFactory(creator=self.user, is_public=True)
self.component = NodeFactory(parent=self.project, creator=self.user, is_public=True)
self.file = utils.create_test_file(self.component, self.user)
ensure_schemas()
self.schema = MetaSchema.find_one(
Q('name', 'eq', 'Replication Recipe (Brandt et al., 2013): Post-Completion') &
Q('schema_version', 'eq', LATEST_SCHEMA_VERSION)
)
with mock_archive(self.project, autocomplete=True, autoapprove=True, schema=self.schema) as registration:
self.registration = registration
def tearDown(self):
super(TestSearchSerializer, self).tearDown()
search.delete_all()
def test_search_serializer_mixed_model_project(self):
req = make_drf_request_with_version(version='2.0')
result = SearchSerializer(self.project, context={'request': req}).data
assert_equal(result['data']['type'], 'nodes')
def test_search_serializer_mixed_model_component(self):
req = make_drf_request_with_version(version='2.0')
result = SearchSerializer(self.component, context={'request': req}).data
assert_equal(result['data']['type'], 'nodes')
def test_search_serializer_mixed_model_registration(self):
req = make_drf_request_with_version(version='2.0')
result = SearchSerializer(self.registration, context={'request': req}).data
assert_equal(result['data']['type'], 'registrations')
def test_search_serializer_mixed_model_file(self):
req = make_drf_request_with_version(version='2.0')
result = SearchSerializer(self.file, context={'request': req}).data
assert_equal(result['data']['type'], 'files')
def test_search_serializer_mixed_model_user(self):
req = make_drf_request_with_version(version='2.0')
result = SearchSerializer(self.user, context={'request': req}).data
assert_equal(result['data']['type'], 'users')
|
mfm24/ChordViz | refs/heads/master | ChordViz.py | 1 | # -*- coding: utf-8 -*-
"""
Created on Fri May 3 21:09:10 2013
@author: matt
# based on MyPlayer3_Callback (which is newer than MyPlayer3.py)
"""
from __future__ import division
import time, math, logging
import numpy as np
from threading import Lock, Thread
import itertools
# not sure I've added correct path in launchd.conf
# and export doesn't obviously work
import sys
sys.path.append('/Users/matt/Dropbox/personal/dev/PythonLibs/')
try:
from uidecorators import ui_decorators
use_ui = True
except ImportError:
    # A bit nasty: we'll create an object where every attribute access
    # returns a decorator factory whose decorators do nothing!
class FakeUIDec:
def __getattr__(self, name):
def no_wrap(*args, **kwargs):
def wrap_creator(func):
def w(*args, **kwargs):
func(*args, **kwargs)
return w
return wrap_creator
return no_wrap
ui_decorators = FakeUIDec()
use_ui=False
try:
import pyaudio
p = pyaudio.PyAudio()
has_pyaudio = True
except ImportError:
logging.warn("PyAudio not found! - Will not be able to output any audio!")
has_pyaudio = False
def play_waveform(w):
def callback(in_data, frame_count, time_info, status):
# this requests upto 1024 frames?
with w.datalock:
ndata = w.data
if ndata is not None:
return (np.hstack([ndata]*(frame_count//1024)), pyaudio.paContinue)
else:
return (None, pyaudio.paComplete)
if has_pyaudio:
# open stream using callback (3)
play_waveform.stream = p.open(format=pyaudio.paInt16,
channels=1,
rate=w.rate,
output=True,
frames_per_buffer=w.size,
stream_callback=callback)
play_waveform.stream = None
max_frequency = 22100 # we stop making notes above this
note_types = {
"PureTone": lambda harmonic: 1 if harmonic==0 else 0,
"Poisson0.5": lambda harmonic: poisson(0.5, harmonic),
"Poisson1": lambda harmonic: poisson(1, harmonic),
"Poisson2": lambda harmonic: poisson(2, harmonic),
"Poisson3": lambda harmonic: poisson(3, harmonic),
"Lorentz1": lambda harmonic: 1.0/(1.0+harmonic**2),
"Lorentz10": lambda harmonic: 10.0/(10.0+harmonic**2),
"Equal": lambda harmonic: 1,
"EqualOdd": lambda harmonic: 1 if harmonic%2==1 or harmonic==0 else 0,
"EqualEven": lambda harmonic: 1 if harmonic%2==0 else 0,
"OneOverX": lambda harmonic: 1.0/(harmonic+1.0)
}
equal_temperament_notes = [2 ** (x / 12.0) for x in range(12)]
just_intonation_notes = [1, 16 / 15., 9 / 8., 6 / 5., 5 / 4., 4 / 3., 45 / 32., 3 / 2., 8 / 5., 5 / 3., 16 / 9., 15 / 8.]
twelve_tone_names = ["I", "IIb", "II", "IIIb", "III", "IV", "IV#", "V", "VIb", "VI", "VIIb", "VII"]
class Waveform(object):
def __init__(self, size=1024*16, rate=44100):
self.size = size
self.rate = rate
self.data = np.zeros((size), dtype=np.int16)
self.datalock = Lock()
self.volume_amp = 0.1
self.form = lambda note: poisson(2, note)
self.notetype="Poisson1"
self.notefreq=440
self.on_notes_changed=[]
self._harmonics_slice = None
self.clear_notes()
def clear_notes(self):
self.notes = []
self()
def set_notes(self, notes):
self.clear_notes()
self.add_notes(notes)
self()
def add_notes(self, notes):
self.notes.append(list(notes))
self()
def __call__(self):
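        # Build the signal in the frequency domain: write each note's
        # magnitude into its FFT bin and mirror it into the conjugate
        # bin so the transform yields a real waveform.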
newdata = np.zeros((self.size), dtype=np.complex64)
for notegroup in self.notes:
for freq, mag in notegroup:
dphase=int (freq*self.size / self.rate )
logging.info("Adding note at pixel %s", dphase)
if dphase > len(newdata)/2:
continue # this is nyquist, can't go any higher
#let's scale mag by number of notes
newdata[dphase]=self.volume_amp*mag*32765/2
#make ft real
newdata[-dphase] = np.conj(newdata[dphase])
sqrtsumsq = math.sqrt((newdata**2).sum())
if sqrtsumsq:
newdata *= self.volume_amp * 2.0 * 32767.0 / sqrtsumsq
printimag = 0
if printimag:
complex_d=np.imag(np.fft.fft(newdata));
print "imag magnitude: ", np.sqrt(np.sum(complex_d**2))
newdata = np.asarray(np.real(np.fft.fft(newdata)), dtype=np.int16)
with self.datalock:
self.data = newdata
for f in self.on_notes_changed:
f()
def get_volume(self):
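        # Report the volume in decibels: 20 * log10(amplitude).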
v = math.log(self.volume_amp, 10)*20
return v
@ui_decorators.slider(getfunc=get_volume, maximum=0, minimum=-50, scale=1)
def volume(self, value):
self.volume_amp = 10**(value/20.0)
self()
def get_note_type(self):
return self.notetype
@ui_decorators.combobox(
getfunc=get_note_type,
options=note_types.keys())
def note_type(self, t):
self.notetype = t
def get_harmonics_slice(self):
if self._harmonics_slice:
return ",".join(self._harmonics_slice)
else:
return ""
@ui_decorators.textbox(getfunc=get_harmonics_slice)
def harmonics_slice(self, n):
"""
Sets the harmonics to display
Should be either [start:]stop[:step]
or else a,b,c where a,b,c are indices to choose
"""
if n=="":
self._harmonics_slice = None
return
if ':' in n:
sc = [int(x or "0") for x in n.split(":")]
if len(sc)==1:
self._harmonics_slice = (None, sc[0], None)
elif len(sc) == 2:
self._harmonics_slice = (sc[0], sc[1], None)
else:
self._harmonics_slice = (sc[0], sc[1], sc[2])
else:
self._harmonics_slice = [int(x or "-1") for x in n.split(',')]
def get_root_frequency(self):
return self.notefreq
@ui_decorators.textbox(getfunc=get_root_frequency)
def root_frequency(self, val):
self.notefreq = float(val)
self()
def add_form(self, root):
if isinstance(self._harmonics_slice, list):
all_notes = list(notes_from_func(note_types[self.notetype], root))
notes = []
for i in self._harmonics_slice:
notes.append(all_notes[i])
else:
slice_args = self._harmonics_slice or (None,)
notes = itertools.islice(
notes_from_func(note_types[self.notetype], root),
*slice_args)
self.add_notes(notes)
@ui_decorators.button
def clear(self):
self.clear_notes()
@ui_decorators.button
def note_root(self):
self.add_form(self.notefreq)
self()
@ui_decorators.button
def note_major3rd(self):
self.add_form(self.notefreq*5.0/4.0)
self()
@ui_decorators.button
def note_fifth(self):
self.add_form(self.notefreq*6.0/4.0)
self()
@ui_decorators.button
def play_major_chord(self):
self.play_threaded_chord([self.notefreq,
self.notefreq*5.0/4.0,
self.notefreq*6.0/4.0])
@ui_decorators.button
def test(self):
self.play_threaded_chord([self.notefreq,
self.notefreq*7.0/8.0,
self.notefreq*6.0/4.0])
@ui_decorators.button
def play_minor_chord(self):
self.play_threaded_chord([self.notefreq,
self.notefreq*12.0/10.0,
self.notefreq*15.0/10.0])
@ui_decorators.button
def play_minor_chord_fifth(self):
self.play_threaded_chord([self.notefreq,
self.notefreq*4.0/3.0,
self.notefreq*8.0/5.0])
def play_threaded_chord(self, roots):
def run_through():
for i,n in enumerate(roots):
self.clear_notes()
[self.add_form([]) for t in range(i)]
self.add_form(n)
time.sleep(1.5)
self.clear_notes()
for n in roots:
self.add_form(n)
Thread(target=run_through).start()
# run in interactive shell and use set_notes to play?
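# Poisson probability mass function, exp(-l) * l**n / n!, used by the
# note_types entries as an amplitude envelope over harmonics.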
def poisson(l, n):
return math.exp(-l)*l**n/math.factorial(n)
def notes_from_func(func, root):
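    # Yield (frequency, magnitude) pairs for the fundamental and its
    # integer harmonics, root*(h+1) weighted by func(h), stopping once
    # max_frequency is exceeded.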
for h in itertools.count():
mag = func(h)
# we cut off until we reach max_frequency
if root+root*h > max_frequency:
return
yield root+root*h, mag
def cleanup():
if has_pyaudio:
play_waveform.stream.close()
p.terminate()
######################## UI Stuff ############################
# this could go in a separate file, but keeping it here for the
# moment
# creating a UI Options class for modifying the visualisation using
# out qt decorators
class UIOptions:
def __init__(self):
self._linear_freq_in_octaves = True
self.virtual_size = 1500,1500
self._inverse = True
self._show_just_notes = True
self._show_base_spiral = True
self._show_ET_notes = False # ET=equal temperament
def get_linear_freq_in_octaves(self):
return self._linear_freq_in_octaves
@ui_decorators.checkbox(getfunc=get_linear_freq_in_octaves)
def linear_freq_in_octaves(self, newval):
self._linear_freq_in_octaves = newval
notes_changed()
def get_show_base_spiral(self):
return self._show_base_spiral
@ui_decorators.checkbox(getfunc=get_show_base_spiral)
def show_base_spiral(self, newval):
self._show_base_spiral = newval
notes_changed()
def get_inverse(self):
return self._inverse
@ui_decorators.checkbox(getfunc=get_inverse)
def inverse(self, newval):
self._inverse = newval
notes_changed()
def get_show_just_notes(self):
return self._show_just_notes
@ui_decorators.checkbox(getfunc=get_show_just_notes)
def show_just_notes(self, newval):
self._show_just_notes = newval
notes_changed()
def get_show_ET_notes(self):
return self._show_ET_notes
@ui_decorators.checkbox(getfunc=get_show_ET_notes)
def show_ET_notes(self, newval):
self._show_ET_notes = newval
notes_changed()
def make_note_lines(root, named_notes, width, radius):
"""
For the dictionary named_notes, draws thin lines for each note
adding the key for the note to the SVG.
This way we can overlay scales on the diagrams.
"""
lines = []
for name, freq in named_notes.iteritems():
(x1, y1), theta = get_pos_theta_for_note(freq, root, 0, 0)
font_size = radius/16.0
lines.append(
'<line x1="{x1}" y1="{y1}" x2="{x2}" y2="{y2}" stroke-width="{width}"/>'.format(
x1=x1, x2=x1 + 2 * radius * math.sin(theta),
y1=y1, y2=y1 - 2 * radius * math.cos(theta),
width=width))
lines.append('<text x="{x}" y="{y}" font-size="{fs}">{text}</text>'.format(
x=x1 + radius * math.sin(theta),
y=y1 - radius * math.cos(theta),
text=name, fs=font_size))
return "\n".join(lines)
def get_pos_theta_for_note(f, root, root_radius, length):
"""
Return (x,y),theta where (x,y) is the starting position of the note
and theta is the angle the note should have
"""
# first, we calculate the octave and theta for the root
logf = math.log(f / root, 2)
note_fraction, octave = math.modf(logf)
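    # modf splits log2(f/root) into a fractional position within the
    # octave (0-1) and a whole octave count.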
if ui_opts.get_linear_freq_in_octaves():
note = (2**note_fraction - 1)
else:
note = note_fraction
theta = note * 2.0 * math.pi
centerx, centery = (x / 2 for x in ui_opts.virtual_size)
r = root_radius + (octave + note_fraction) * length
x = centerx + r * math.sin(theta)
y = centery - r * math.cos(theta)
return (x,y), theta
def make_spiral_lines_from_notes(root, notes,
length=75, root_radius=100,
stroke_width_scale=15):
"""
    Is there a way to represent notes where octaves are still separated but
we can see notes of the same pitch?
We could draw a spiral, where an octave is 360 degrees and on the next
ring out.
There's a similar idea here:
http://nastechservices.com/Spectrograms.html
    How should we represent a 3:2 ratio? If we just take log(x,2)*2*pi
then 3/2 is at 210deg (or 3.67rad). Is it worth making the scale linear,
and putting 3/2 at 180deg? We could also spiral so that 3/2f gets us to 180
deg then we stretch out the remaining part of the curve?
We'll try the linear for now.
It works, but not all 3/2 notes are 180deg from each other
(if the higher note is past the root, it's not)
Is there a way to do this? Maybe not, eg we make 5th = 3r/2 opposite root
and 3/2r = 9/4 != root and yet root still needs to be 180deg from it
"""
    width_gamma = 0.2  # stroke width scales with magnitude**width_gamma
centerx, centery = (x / 2 for x in ui_opts.virtual_size)
lines = []
for f, m in notes:
# we split the note into octave and note (0 - 1)
width = stroke_width_scale * math.pow(m, width_gamma)
(x1, y1), theta = get_pos_theta_for_note(f, root, root_radius, length)
x2 = x1 + 0.9 * length * math.sin(theta)
y2 = y1 - 0.9 * length * math.cos(theta)
lines.append('<line x1="{x1}" y1="{y1}" x2="{x2}" y2="{y2}" stroke-width="{width}"/>'.format(
x1=x1, x2=x2, y1=y1, y2=y2,
width=width))
return "\n".join(lines)
def make_spiral_octave_lines(root, length=75, root_radius=100, max_f=22100):
"""
Starting with the root note, draw the spiral on which
any higher frequency notes will sit. This way we can count
harmonics more easily
"""
width = 0.5
(x1, y1), _ = get_pos_theta_for_note(root, root, root_radius, length)
lines = []
step = int(root/50) or 1
for f in range(int(root), int(max_f), step):
(x2, y2), theta = get_pos_theta_for_note(f, root, root_radius, length)
lines.append('<line x1="{x1}" y1="{y1}" x2="{x2}" y2="{y2}" stroke-width="{width}"/>'.format(
x1=x1, x2=x2, y1=y1, y2=y2,
width=width))
x1, y1 = x2, y2
return "\n".join(lines)
rgb_colors = [0xFF0000, 0x00FF00, 0x0000FF]
cym_colors = [0x00FFFF, 0xFF00FF, 0xFFFF00]
white = 0xFFFFFFFF
black = 0xFF000000
# some QT specific stuff follows:
import PySide.QtCore
import PySide.QtGui
import PySide.QtSvg
def render_svg(svg, qp):
r = PySide.QtSvg.QSvgRenderer()
w,h = ui_opts.virtual_size
ret = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" width="{}" height="{}">'.format(w, h)
ret += svg
ret += "</svg>"
# print ret
r.load(PySide.QtCore.QByteArray(ret))
assert r.isValid()
r.render(qp)
def raw_svg_to_group(svg, color, extras=""):
ret = '<g stroke="#{0:06X}" fill="#{0:06X}" {1}>'.format(
color & 0xFFFFFF, extras)
ret += svg
ret += "</g>"
return ret
from uidecorators.qt_framework import Framework
def notes_changed(*args):
mode = "inverse" if ui_opts.get_inverse() else "normal"
qim = PySide.QtGui.QImage(d.widget().width(), d.widget().height(), PySide.QtGui.QImage.Format.Format_ARGB32)
qp = PySide.QtGui.QPainter(qim)
qp.setRenderHint(qp.Antialiasing)
qp.setRenderHint(qp.SmoothPixmapTransform)
if mode == "inverse":
#qim.fill(white)
qp.setCompositionMode(qp.CompositionMode.CompositionMode_Darken)
colors = cym_colors
default_foreground = black
default_background = white
mode = "darken"
else:
#qim.fill(black)
qp.setCompositionMode(qp.CompositionMode.CompositionMode_Lighten)
colors = rgb_colors
default_foreground = white
default_background = black
mode = "lighten"
default_foreground = 0x888888
root = w.get_root_frequency()
all_svgs=[]
num_octaves = math.log(max_frequency / root, 2)
# let's scale note height and width with number of octaves we're drawing
note_length = 400.0 / num_octaves
note_width = 500 / 2**num_octaves
# we'll set the background with a svg rect
svg = raw_svg_to_group('<rect width="1500" height="1500" />', default_background)
all_svgs.append(svg)
for check, notes in [(ui_opts.get_show_just_notes, just_intonation_notes),
(ui_opts.get_show_ET_notes, equal_temperament_notes)]:
if check():
overlay = make_note_lines(
root,
{i: f * root for i, f in zip(twelve_tone_names, notes)},
0.5, 600)
svg = raw_svg_to_group(overlay, default_foreground)
all_svgs.append(svg)
if ui_opts.get_show_base_spiral():
overlay = make_spiral_octave_lines(root, length=note_length)
svg = raw_svg_to_group(overlay, default_foreground)
all_svgs.append(svg)
theta = 0
width, height = ui_opts.virtual_size
for notegroup, col in zip(w.notes, colors):
notegrp_svg = make_spiral_lines_from_notes(
root, notegroup, length=note_length, stroke_width_scale=note_width)
notegrp_svg += '<circle r="{}" cx="{}" cy="{}"/>'.format(
width / 30.0, width / 10.0 + width / 45.0 * math.sin(theta),
width / 10.0 + width / 45.0 * math.cos(theta))
theta += math.pi*2.0/len(w.notes)
# convert to a svg group with some extra tags to make inkscape happy
svg = raw_svg_to_group(
notegrp_svg, col,
extras='inkscape:groupmode="layer" filter="url(#blend)"')
all_svgs.append(svg)
# finally we'll render tham all
for svg in all_svgs:
render_svg(svg, qp)
# try to save an inkscape compatible svg file.
# we can add a darken/lighten filter, and we need to add
# enable-background="new" to the svg header and the
# inkscape ns:
with open("out.svg", 'w') as f:
f.write('<svg xmlns="http://www.w3.org/2000/svg" '
'xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" '
'version="1.1" width="{}" height="{}" '
'enable-background="new">'.format(width, height))
f.write('<filter id="blend">'
'<feBlend in2="BackgroundImage" mode="{0}" />'
'</filter>'.format(mode))
f.write("\n".join(all_svgs))
f.write("</svg>")
d.widget().setPixmap(PySide.QtGui.QPixmap.fromImage(qim))
# qim.save("out.png", 'PNG')
qp = None # we have to make sure qim is deleted before QPainter?
if __name__=="__main__":
w=Waveform()
play_waveform(w)
if use_ui:
ui_opts = UIOptions()
f = Framework()
f.get_main_window().resize(800,600)
d=PySide.QtGui.QDockWidget("Note Visualization")
d.setWidget(PySide.QtGui.QLabel())
f.get_main_window().addDockWidget(PySide.QtCore.Qt.RightDockWidgetArea, d)
# play notes is threaded, so we need to call notes_changed from the
# ui thread.
w.on_notes_changed.append(lambda: f.run_on_ui_thread(notes_changed))
f.display_widgets([f.get_obj_widget(w), f.get_obj_widget(ui_opts)])
f.close()
|
sontek/rethinkdb | refs/heads/next | external/re2_20140111/re2/make_unicode_groups.py | 121 | #!/usr/bin/python
# Copyright 2008 The RE2 Authors. All Rights Reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
"""Generate C++ tables for Unicode Script and Category groups."""
import sys
import unicode
_header = """
// GENERATED BY make_unicode_groups.py; DO NOT EDIT.
// make_unicode_groups.py >unicode_groups.cc
#include "re2/unicode_groups.h"
namespace re2 {
"""
_trailer = """
} // namespace re2
"""
n16 = 0
n32 = 0
def MakeRanges(codes):
"""Turn a list like [1,2,3,7,8,9] into a range list [[1,3], [7,9]]"""
ranges = []
last = -100
for c in codes:
if c == last+1:
ranges[-1][1] = c
else:
ranges.append([c, c])
last = c
return ranges
def PrintRanges(type, name, ranges):
"""Print the ranges as an array of type named name."""
print "static const %s %s[] = {" % (type, name,)
for lo, hi in ranges:
print "\t{ %d, %d }," % (lo, hi)
print "};"
# def PrintCodes(type, name, codes):
# """Print the codes as an array of type named name."""
# print "static %s %s[] = {" % (type, name,)
# for c in codes:
# print "\t%d," % (c,)
# print "};"
def PrintGroup(name, codes):
"""Print the data structures for the group of codes.
Return a UGroup literal for the group."""
# See unicode_groups.h for a description of the data structure.
# Split codes into 16-bit ranges and 32-bit ranges.
range16 = MakeRanges([c for c in codes if c < 65536])
range32 = MakeRanges([c for c in codes if c >= 65536])
# Pull singleton ranges out of range16.
# code16 = [lo for lo, hi in range16 if lo == hi]
# range16 = [[lo, hi] for lo, hi in range16 if lo != hi]
global n16
global n32
n16 += len(range16)
n32 += len(range32)
ugroup = "{ \"%s\", +1" % (name,)
# if len(code16) > 0:
# PrintCodes("uint16", name+"_code16", code16)
# ugroup += ", %s_code16, %d" % (name, len(code16))
# else:
# ugroup += ", 0, 0"
if len(range16) > 0:
PrintRanges("URange16", name+"_range16", range16)
ugroup += ", %s_range16, %d" % (name, len(range16))
else:
ugroup += ", 0, 0"
if len(range32) > 0:
PrintRanges("URange32", name+"_range32", range32)
ugroup += ", %s_range32, %d" % (name, len(range32))
else:
ugroup += ", 0, 0"
ugroup += " }"
return ugroup
def main():
print _header
ugroups = []
for name, codes in unicode.Categories().iteritems():
ugroups.append(PrintGroup(name, codes))
for name, codes in unicode.Scripts().iteritems():
ugroups.append(PrintGroup(name, codes))
print "// %d 16-bit ranges, %d 32-bit ranges" % (n16, n32)
print "const UGroup unicode_groups[] = {";
ugroups.sort()
for ug in ugroups:
print "\t%s," % (ug,)
print "};"
print "const int num_unicode_groups = %d;" % (len(ugroups),)
print _trailer
if __name__ == '__main__':
main()
|
runt18/mojo | refs/heads/master | third_party/cython/src/Cython/Compiler/Future.py | 101 | def _get_feature(name):
import __future__
# fall back to a unique fake object for earlier Python versions or Python 3
return getattr(__future__, name, object())
unicode_literals = _get_feature("unicode_literals")
with_statement = _get_feature("with_statement")
division = _get_feature("division")
print_function = _get_feature("print_function")
absolute_import = _get_feature("absolute_import")
nested_scopes = _get_feature("nested_scopes") # dummy
generators = _get_feature("generators") # dummy
del _get_feature
|
moritzschaefer/luigi | refs/heads/master | test/import_test.py | 46 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from helpers import unittest
class ImportTest(unittest.TestCase):
def import_test(self):
"""Test that all module can be imported
"""
luigidir = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'..'
)
packagedir = os.path.join(luigidir, 'luigi')
for root, subdirs, files in os.walk(packagedir):
package = os.path.relpath(root, luigidir).replace('/', '.')
if '__init__.py' in files:
__import__(package)
for f in files:
if f.endswith('.py') and not f.startswith('_'):
__import__(package + '.' + f[:-3])
def import_luigi_test(self):
"""
Test that the top luigi package can be imported and contains the usual suspects.
"""
import luigi
# These should exist (if not, this will cause AttributeErrors)
expected = [
luigi.Event,
luigi.Config,
luigi.Task, luigi.ExternalTask, luigi.WrapperTask,
luigi.Target, luigi.LocalTarget, luigi.File,
luigi.namespace,
luigi.RemoteScheduler,
luigi.RPCError,
luigi.run, luigi.build,
luigi.Parameter,
luigi.DateHourParameter, luigi.DateMinuteParameter, luigi.DateParameter,
luigi.MonthParameter, luigi.YearParameter,
luigi.DateIntervalParameter, luigi.TimeDeltaParameter,
luigi.IntParameter, luigi.FloatParameter,
luigi.BooleanParameter, luigi.BoolParameter,
]
self.assertGreater(len(expected), 0)
|
nrupatunga/PY-GOTURN | refs/heads/goturn-dev | goturn/loader/loader_imagenet.py | 1 | # Date: Nrupatunga: Tuesday 04 July 2017
# Email: nrupatunga@whodat.com
# Name: Nrupatunga
# Description: loading Imagenet dataset
from __future__ import print_function
import os
import cv2
import glob
from annotation import annotation
import xml.etree.ElementTree as ET
from ..logger.logger import setup_logger
from ..helper import config
kMaxRatio = 0.66
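# Bounding boxes spanning more than kMaxRatio of the image width or
# height are skipped by load_annotation_file.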
class loader_imagenet:
"""Docstring for loader_imagenetdet. """
def __init__(self, imagenet_folder, annotations_folder, logger):
"""TODO: to be defined1. """
self.logger = logger
self.imagenet_folder = imagenet_folder
self.annotations_folder = annotations_folder
if not os.path.isdir(imagenet_folder):
logger.error('{} is not a valid directory'.format(imagenet_folder))
def loaderImageNetDet(self):
"""TODO: Docstring for get_videos.
:returns: TODO
"""
logger = self.logger
imagenet_subdirs = sorted(self.find_subfolders(self.annotations_folder))
num_annotations = 0
list_of_annotations_out = []
for i, imgnet_sub_folder in enumerate(imagenet_subdirs):
annotations_files = sorted(glob.glob(os.path.join(self.annotations_folder, imgnet_sub_folder, '*.xml')))
logger.info('Loading {}/{} - annotation file from folder = {}'.format(i + 1, len(imagenet_subdirs), imgnet_sub_folder))
for ann in annotations_files:
list_of_annotations, num_ann_curr = self.load_annotation_file(ann)
num_annotations = num_annotations + num_ann_curr
if len(list_of_annotations) == 0:
continue
list_of_annotations_out.append(list_of_annotations)
logger.info('Found {} annotations from {} images'.format(num_annotations, len(list_of_annotations_out)))
# save it for future use
self.list_of_annotations_out = list_of_annotations_out
self.num_annotations = num_annotations
return list_of_annotations_out
def find_subfolders(self, imagenet_folder):
"""TODO: Docstring for find_subfolders.
:vot_folder: directory for vot videos
:returns: list of video sub directories
"""
return [dir_name for dir_name in os.listdir(imagenet_folder) if os.path.isdir(os.path.join(imagenet_folder, dir_name))]
def load_annotation_file(self, annotation_file):
"""TODO: Docstring for load_annotation_file.
:returns: TODO
"""
list_of_annotations = []
num_annotations = 0
root = ET.parse(annotation_file).getroot()
folder = root.find('folder').text
filename = root.find('filename').text
size = root.find('size')
disp_width = int(size.find('width').text)
disp_height = int(size.find('height').text)
for obj in root.findall('object'):
bbox = obj.find('bndbox')
xmin = int(bbox.find('xmin').text)
xmax = int(bbox.find('xmax').text)
ymin = int(bbox.find('ymin').text)
ymax = int(bbox.find('ymax').text)
width = xmax - xmin
height = ymax - ymin
if width > (kMaxRatio * disp_width) or height > (kMaxRatio * disp_height):
continue
if ((xmin < 0) or (ymin < 0) or (xmax <= xmin) or (ymax <= ymin)):
continue
objAnnotation = annotation()
objAnnotation.setbbox(xmin, xmax, ymin, ymax)
objAnnotation.setWidthHeight(disp_width, disp_height)
objAnnotation.setImagePath(os.path.join(folder, filename))
list_of_annotations.append(objAnnotation)
num_annotations = num_annotations + 1
return list_of_annotations, num_annotations
def load_annotation(self, image_num, annotation_num):
"""TODO: Docstring for load_annotation.
:returns: TODO
"""
logger = self.logger
images = self.list_of_annotations_out
list_annotations = images[image_num]
random_ann = list_annotations[annotation_num]
img_path = os.path.join(self.imagenet_folder, random_ann.image_path + '.JPEG')
if config.DEBUG:
img_path = "/media/nrupatunga/Data-Backup/DL/goturn/ILSVRC2014/ILSVRC2014_DET_train/ILSVRC2014_train_0005/ILSVRC2014_train_00059375.JPEG"
random_ann.bbox.x1 = 243
random_ann.bbox.y1 = 157
random_ann.bbox.x2 = 278
random_ann.bbox.y2 = 176
random_ann.disp_height = 375
random_ann.disp_width = 500
image = cv2.imread(img_path)
img_height = image.shape[0]
img_width = image.shape[1]
sc_factor_1 = 1.0
if img_height != random_ann.disp_height or img_width != random_ann.disp_width:
logger.info('Image Number = {}, Annotation Number = {}, Image file = {}'.format(image_num, annotation_num, img_path))
logger.info('Image Size = {} x {}'.format(img_width, img_height))
logger.info('Display Size = {} x {}'.format(random_ann.disp_width, random_ann.disp_height))
sc_factor_1 = (img_height * 1.) / random_ann.disp_height
sc_factor_2 = (img_width * 1.) / random_ann.disp_width
logger.info('Factor: {} {}'.format(sc_factor_1, sc_factor_2))
bbox = random_ann.bbox
bbox.x1 = bbox.x1 * sc_factor_1
bbox.x2 = bbox.x2 * sc_factor_1
bbox.y1 = bbox.y1 * sc_factor_1
bbox.y2 = bbox.y2 * sc_factor_1
return image, bbox
if '__main__' == __name__:
logger = setup_logger(logfile=None)
objLoaderImgNet = loader_imagenet('/media/nrupatunga/data/datasets/ILSVRC2014/ILSVRC2014_DET_train/', '/media/nrupatunga/data/datasets/ILSVRC2014/ILSVRC2014_DET_bbox_train/', logger)
dict_list_of_annotations = objLoaderImgNet.loaderImageNetDet()
|
tomasreimers/tensorflow-emscripten | refs/heads/master | tensorflow/python/training/moving_averages.py | 16 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Maintain moving averages of parameters."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.training import slot_creator
# TODO(touts): switch to variables.Variable.
def assign_moving_average(variable, value, decay, zero_debias=True, name=None):
"""Compute the moving average of a variable.
The moving average of 'variable' updated with 'value' is:
variable * decay + value * (1 - decay)
The returned Operation sets 'variable' to the newly computed moving average.
The new value of 'variable' can be set with the 'AssignSub' op as:
variable -= (1 - decay) * (variable - value)
Since variables that are initialized to a `0` value will be `0` biased,
`zero_debias` optionally enables scaling by the mathematically correct
debiasing factor of
1 - decay ** num_updates
See `ADAM: A Method for Stochastic Optimization` Section 3 for more details
(https://arxiv.org/abs/1412.6980).
Args:
variable: A Variable.
value: A tensor with the same shape as 'variable'.
decay: A float Tensor or float value. The moving average decay.
zero_debias: A python bool. If true, assume the variable is 0-initialized and
unbias it, as in https://arxiv.org/abs/1412.6980. See docstring in
`_zero_debias` for more details.
name: Optional name of the returned operation.
Returns:
An Operation that updates 'variable' with the newly computed
moving average.
"""
with ops.name_scope(name, "AssignMovingAvg",
[variable, value, decay]) as scope:
with ops.colocate_with(variable):
decay = ops.convert_to_tensor(1.0 - decay, name="decay")
if decay.dtype != variable.dtype.base_dtype:
decay = math_ops.cast(decay, variable.dtype.base_dtype)
if zero_debias:
update_delta = _zero_debias(variable, value, decay)
else:
update_delta = (variable - value) * decay
return state_ops.assign_sub(variable, update_delta, name=scope)
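# Illustrative sketch (not part of the original module): the classic EMA
# update and the `assign_sub` form used above are algebraically identical.
# `_ema_update_demo` is a hypothetical pure-Python helper added here for
# exposition only; nothing in the library calls it.
def _ema_update_demo(variable=1.0, value=3.0, decay=0.9):
    classic = decay * variable + (1 - decay) * value
    assign_sub_form = variable - (1 - decay) * (variable - value)
    assert abs(classic - assign_sub_form) < 1e-12  # both formulas agree
    return classic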
def weighted_moving_average(value,
decay,
weight,
truediv=True,
collections=None,
name=None):
"""Compute the weighted moving average of `value`.
Conceptually, the weighted moving average is:
`moving_average(value * weight) / moving_average(weight)`,
where a moving average updates by the rule
`new_value = decay * old_value + (1 - decay) * update`
Internally, this Op keeps moving average variables of both `value * weight`
and `weight`.
Args:
value: A numeric `Tensor`.
decay: A float `Tensor` or float value. The moving average decay.
weight: `Tensor` that keeps the current value of a weight.
Shape should be able to multiply `value`.
truediv: Boolean, if `True`, dividing by `moving_average(weight)` is
floating point division. If `False`, use division implied by dtypes.
collections: List of graph collections keys to add the internal variables
`value * weight` and `weight` to.
Defaults to `[GraphKeys.GLOBAL_VARIABLES]`.
name: Optional name of the returned operation.
Defaults to "WeightedMovingAvg".
Returns:
An Operation that updates and returns the weighted moving average.
"""
# Unlike assign_moving_average, the weighted moving average doesn't modify
# user-visible variables. It is the ratio of two internal variables, which are
# moving averages of the updates. Thus, the signature of this function is
# quite different than assign_moving_average.
if collections is None:
collections = [ops.GraphKeys.GLOBAL_VARIABLES]
with variable_scope.variable_scope(name, "WeightedMovingAvg",
[value, weight, decay]) as scope:
value_x_weight_var = variable_scope.get_variable(
"value_x_weight",
shape=value.get_shape(),
dtype=value.dtype,
initializer=init_ops.zeros_initializer(),
trainable=False,
collections=collections)
weight_var = variable_scope.get_variable(
"weight",
shape=weight.get_shape(),
dtype=weight.dtype,
initializer=init_ops.zeros_initializer(),
trainable=False,
collections=collections)
numerator = assign_moving_average(
value_x_weight_var, value * weight, decay, zero_debias=False)
denominator = assign_moving_average(
weight_var, weight, decay, zero_debias=False)
if truediv:
return math_ops.truediv(numerator, denominator, name=scope.name)
else:
return math_ops.div(numerator, denominator, name=scope.name)
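# Illustrative note (a sketch, not part of the original module): the weighted
# moving average is the ratio of two plain moving averages. Tracking
# ema(value * weight) and ema(weight) separately and dividing reproduces the
# op above; e.g. for a constant pair (value=2.0, weight=0.5) both EMAs
# converge, so the ratio converges to 2.0 regardless of the decay.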
def _zero_debias(unbiased_var, value, decay):
"""Compute the delta required for a debiased Variable.
All exponential moving averages initialized with Tensors are initialized to 0,
and therefore are biased to 0. Variables initialized to 0 and used as EMAs are
similarly biased. This function creates the debias update amount according to
a scale factor, as in https://arxiv.org/abs/1412.6980.
To demonstrate the bias that results from 0-initialization, take an EMA that
was initialized to `0` with decay `b`. After `t` timesteps of seeing the
constant `c`, the variable will have the following value:
```
EMA = 0*b^(t) + c*(1 - b)*b^(t-1) + c*(1 - b)*b^(t-2) + ...
= c*(1 - b^t)
```
To have the true value `c`, we would divide by the scale factor `1 - b^t`.
In order to perform debiasing, we use two shadow variables. One keeps track of
the biased estimate, and the other keeps track of the number of updates that
have occurred.
Args:
unbiased_var: A Variable representing the current value of the unbiased EMA.
value: A Tensor representing the most recent value.
decay: A Tensor representing `1-decay` for the EMA.
Returns:
The amount that the unbiased variable should be updated. Computing this
tensor will also update the shadow variables appropriately.
"""
with variable_scope.variable_scope(
unbiased_var.op.name, values=[unbiased_var, value, decay]) as scope:
with ops.colocate_with(unbiased_var):
with ops.control_dependencies(None):
biased_initializer = init_ops.zeros_initializer(
dtype=unbiased_var.dtype)(unbiased_var.get_shape())
local_step_initializer = init_ops.zeros_initializer()
biased_var = variable_scope.get_variable(
"biased", initializer=biased_initializer, trainable=False)
local_step = variable_scope.get_variable(
"local_step",
shape=[],
dtype=unbiased_var.dtype,
initializer=local_step_initializer,
trainable=False)
# Get an update ops for both shadow variables.
update_biased = state_ops.assign_sub(biased_var,
(biased_var - value) * decay,
name=scope.name)
update_local_step = local_step.assign_add(1)
# Compute the value of the delta to update the unbiased EMA. Make sure to
# use the new values of the biased variable and the local step.
with ops.control_dependencies([update_biased, update_local_step]):
# This function gets `1 - decay`, so use `1.0 - decay` in the exponent.
unbiased_ema_delta = (unbiased_var - biased_var.read_value() /
(1 - math_ops.pow(
1.0 - decay, local_step.read_value())))
return unbiased_ema_delta
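# Illustrative sketch (not part of the original module): for a constant input
# `c`, a 0-initialized EMA with decay `b` equals c * (1 - b**t) after t steps,
# so dividing by the debias factor (1 - b**t) recovers c. `_zero_debias_demo`
# is a hypothetical pure-Python helper added for exposition only.
def _zero_debias_demo(c=2.0, b=0.9, t=5):
    ema = 0.0
    for _ in range(t):
        ema = b * ema + (1 - b) * c  # biased, 0-initialized EMA
    debiased = ema / (1 - b ** t)    # apply the debias scale factor
    assert abs(debiased - c) < 1e-9
    return debiased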
class ExponentialMovingAverage(object):
"""Maintains moving averages of variables by employing an exponential decay.
When training a model, it is often beneficial to maintain moving averages of
the trained parameters. Evaluations that use averaged parameters sometimes
produce significantly better results than the final trained values.
The `apply()` method adds shadow copies of trained variables and add ops that
maintain a moving average of the trained variables in their shadow copies.
It is used when building the training model. The ops that maintain moving
averages are typically run after each training step.
The `average()` and `average_name()` methods give access to the shadow
variables and their names. They are useful when building an evaluation
model, or when restoring a model from a checkpoint file. They help use the
moving averages in place of the last trained values for evaluations.
The moving averages are computed using exponential decay. You specify the
decay value when creating the `ExponentialMovingAverage` object. The shadow
variables are initialized with the same initial values as the trained
variables. When you run the ops to maintain the moving averages, each
shadow variable is updated with the formula:
`shadow_variable -= (1 - decay) * (shadow_variable - variable)`
This is mathematically equivalent to the classic formula below, but the use
of an `assign_sub` op (the `"-="` in the formula) allows concurrent lockless
updates to the variables:
`shadow_variable = decay * shadow_variable + (1 - decay) * variable`
Reasonable values for `decay` are close to 1.0, typically in the
multiple-nines range: 0.999, 0.9999, etc.
Example usage when creating a training model:
```python
# Create variables.
var0 = tf.Variable(...)
var1 = tf.Variable(...)
# ... use the variables to build a training model...
...
# Create an op that applies the optimizer. This is what we usually
# would use as a training op.
opt_op = opt.minimize(my_loss, [var0, var1])
# Create an ExponentialMovingAverage object
ema = tf.train.ExponentialMovingAverage(decay=0.9999)
# Create the shadow variables, and add ops to maintain moving averages
# of var0 and var1.
maintain_averages_op = ema.apply([var0, var1])
# Create an op that will update the moving averages after each training
# step. This is what we will use in place of the usual training op.
with tf.control_dependencies([opt_op]):
training_op = tf.group(maintain_averages_op)
...train the model by running training_op...
```
There are two ways to use the moving averages for evaluations:
* Build a model that uses the shadow variables instead of the variables.
For this, use the `average()` method which returns the shadow variable
for a given variable.
* Build a model normally but load the checkpoint files to evaluate by using
the shadow variable names. For this use the `average_name()` method. See
the [Saver class](../../api_docs/python/train.md#Saver) for more
information on restoring saved variables.
Example of restoring the shadow variable values:
```python
# Create a Saver that loads variables from their saved shadow values.
shadow_var0_name = ema.average_name(var0)
shadow_var1_name = ema.average_name(var1)
saver = tf.train.Saver({shadow_var0_name: var0, shadow_var1_name: var1})
saver.restore(...checkpoint filename...)
# var0 and var1 now hold the moving average values
```
@@__init__
@@apply
@@average_name
@@average
@@variables_to_restore
"""
def __init__(self, decay, num_updates=None, zero_debias=False,
name="ExponentialMovingAverage"):
"""Creates a new ExponentialMovingAverage object.
The `apply()` method has to be called to create shadow variables and add
ops to maintain moving averages.
The optional `num_updates` parameter allows one to tweak the decay rate
dynamically. It is typical to pass the count of training steps, usually
kept in a variable that is incremented at each step, in which case the
decay rate is lower at the start of training. This makes moving averages
move faster. If passed, the actual decay rate used is:
`min(decay, (1 + num_updates) / (10 + num_updates))`
Args:
decay: Float. The decay to use.
num_updates: Optional count of number of updates applied to variables.
zero_debias: If `True`, zero debias moving-averages that are initialized
with tensors.
name: String. Optional prefix name to use for the name of ops added in
`apply()`.
"""
self._decay = decay
self._num_updates = num_updates
self._zero_debias = zero_debias
self._name = name
self._averages = {}
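# Illustrative note (derived from the formula above, not original text): when
# `num_updates` tracks the global step, the effective decay ramps up during
# training, e.g. min(0.9999, (1 + 0) / (10 + 0)) = 0.1 at step 0 and
# min(0.9999, (1 + 90) / (10 + 90)) = 0.91 at step 90, before saturating at
# the configured `decay`.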
def apply(self, var_list=None):
"""Maintains moving averages of variables.
`var_list` must be a list of `Variable` or `Tensor` objects. This method
creates shadow variables for all elements of `var_list`. Shadow variables
for `Variable` objects are initialized to the variable's initial value.
They will be added to the `GraphKeys.MOVING_AVERAGE_VARIABLES` collection.
For `Tensor` objects, the shadow variables are initialized to 0 and zero
debiased (see docstring in `assign_moving_average` for more details).
Shadow variables are created with `trainable=False` and added to the
`GraphKeys.ALL_VARIABLES` collection. They will be returned by calls to
`tf.global_variables()`.
Returns an op that updates all shadow variables as described above.
Note that `apply()` can be called multiple times with different lists of
variables.
Args:
var_list: A list of Variable or Tensor objects. The variables
and Tensors must be of types float16, float32, or float64.
Returns:
An Operation that updates the moving averages.
Raises:
TypeError: If the arguments are not all float16, float32, or float64.
ValueError: If the moving average of one of the variables is already
being computed.
"""
# TODO(touts): op_scope
if var_list is None:
var_list = variables.trainable_variables()
zero_debias_true = set() # set of vars to set `zero_debias=True`
for var in var_list:
if var.dtype.base_dtype not in [dtypes.float16, dtypes.float32,
dtypes.float64]:
raise TypeError("The variables must be half, float, or double: %s" %
var.name)
if var in self._averages:
raise ValueError("Moving average already computed for: %s" % var.name)
# For variables: to lower communication bandwidth across devices we keep
# the moving averages on the same device as the variables. For other
# tensors, we rely on the existing device allocation mechanism.
with ops.control_dependencies(None):
if isinstance(var, variables.Variable):
avg = slot_creator.create_slot(var,
var.initialized_value(),
self._name,
colocate_with_primary=True)
# NOTE(mrry): We only add `tf.Variable` objects to the
# `MOVING_AVERAGE_VARIABLES` collection.
ops.add_to_collection(ops.GraphKeys.MOVING_AVERAGE_VARIABLES, var)
else:
avg = slot_creator.create_zeros_slot(
var,
self._name,
colocate_with_primary=(var.op.type in ["Variable", "VariableV2"]))
if self._zero_debias:
zero_debias_true.add(avg)
self._averages[var] = avg
with ops.name_scope(self._name) as scope:
decay = ops.convert_to_tensor(self._decay, name="decay")
if self._num_updates is not None:
num_updates = math_ops.cast(self._num_updates,
dtypes.float32,
name="num_updates")
decay = math_ops.minimum(decay,
(1.0 + num_updates) / (10.0 + num_updates))
updates = []
for var in var_list:
zero_debias = self._averages[var] in zero_debias_true
updates.append(assign_moving_average(
self._averages[var], var, decay, zero_debias=zero_debias))
return control_flow_ops.group(*updates, name=scope)
def average(self, var):
"""Returns the `Variable` holding the average of `var`.
Args:
var: A `Variable` object.
Returns:
A `Variable` object or `None` if the moving average of `var`
is not maintained.
"""
return self._averages.get(var, None)
def average_name(self, var):
"""Returns the name of the `Variable` holding the average for `var`.
The typical scenario for `ExponentialMovingAverage` is to compute moving
averages of variables during training, and restore the variables from the
computed moving averages during evaluations.
To restore variables, you have to know the name of the shadow variables.
That name and the original variable can then be passed to a `Saver()` object
to restore the variable from the moving average value with:
`saver = tf.train.Saver({ema.average_name(var): var})`
`average_name()` can be called whether or not `apply()` has been called.
Args:
var: A `Variable` object.
Returns:
A string: The name of the variable that will be used or was used
by the `ExponentialMovingAverage class` to hold the moving average of
`var`.
"""
if var in self._averages:
return self._averages[var].op.name
return ops.get_default_graph().unique_name(
var.op.name + "/" + self._name, mark_as_used=False)
def variables_to_restore(self, moving_avg_variables=None):
"""Returns a map of names to `Variables` to restore.
If a variable has a moving average, use the moving average variable name as
the restore name; otherwise, use the variable name.
For example,
```python
variables_to_restore = ema.variables_to_restore()
saver = tf.train.Saver(variables_to_restore)
```
Below is an example of such mapping:
```
conv/batchnorm/gamma/ExponentialMovingAverage: conv/batchnorm/gamma,
conv_4/conv2d_params/ExponentialMovingAverage: conv_4/conv2d_params,
global_step: global_step
```
Args:
moving_avg_variables: a list of variables that require the use of the
moving average variable name when being restored. If None, it will default to
variables.moving_average_variables() + variables.trainable_variables()
Returns:
A map from restore_names to variables. The restore_name can be the
moving_average version of the variable name if it exists, or the original
variable name.
"""
name_map = {}
if moving_avg_variables is None:
# Include trainable variables and variables which have been explicitly
# added to the moving_average_variables collection.
moving_avg_variables = variables.trainable_variables()
moving_avg_variables += variables.moving_average_variables()
# Remove duplicates
moving_avg_variables = set(moving_avg_variables)
# Collect all the variables with moving average,
for v in moving_avg_variables:
name_map[self.average_name(v)] = v
# Make sure we restore variables without moving average as well.
for v in list(set(variables.global_variables()) - moving_avg_variables):
if v.op.name not in name_map:
name_map[v.op.name] = v
return name_map
|
pluskid/mxnet | refs/heads/master | example/image-classification/symbols/resnet.py | 13 | '''
Adapted from https://github.com/tornadomeet/ResNet/blob/master/symbol_resnet.py
Original author Wei Wu
Implemented the following paper:
Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun. "Identity Mappings in Deep Residual Networks"
'''
import mxnet as mx
def residual_unit(data, num_filter, stride, dim_match, name, bottle_neck=True, bn_mom=0.9, workspace=256, memonger=False):
"""Return ResNet Unit symbol for building ResNet
Parameters
----------
data : str
Input data
num_filter : int
Number of output channels
stride : tuple
Stride used in convolution
dim_match : Boolean
True if the number of channels of input and output match, otherwise they differ
name : str
Base name of the operators
workspace : int
Workspace used in convolution operator
"""
if bottle_neck:
# the same as https://github.com/facebook/fb.resnet.torch#notes, a bit difference with origin paper
bn1 = mx.sym.BatchNorm(data=data, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_bn1')
act1 = mx.sym.Activation(data=bn1, act_type='relu', name=name + '_relu1')
conv1 = mx.sym.Convolution(data=act1, num_filter=int(num_filter*0.25), kernel=(1,1), stride=(1,1), pad=(0,0),
no_bias=True, workspace=workspace, name=name + '_conv1')
bn2 = mx.sym.BatchNorm(data=conv1, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_bn2')
act2 = mx.sym.Activation(data=bn2, act_type='relu', name=name + '_relu2')
conv2 = mx.sym.Convolution(data=act2, num_filter=int(num_filter*0.25), kernel=(3,3), stride=stride, pad=(1,1),
no_bias=True, workspace=workspace, name=name + '_conv2')
bn3 = mx.sym.BatchNorm(data=conv2, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_bn3')
act3 = mx.sym.Activation(data=bn3, act_type='relu', name=name + '_relu3')
conv3 = mx.sym.Convolution(data=act3, num_filter=num_filter, kernel=(1,1), stride=(1,1), pad=(0,0), no_bias=True,
workspace=workspace, name=name + '_conv3')
if dim_match:
shortcut = data
else:
shortcut = mx.sym.Convolution(data=act1, num_filter=num_filter, kernel=(1,1), stride=stride, no_bias=True,
workspace=workspace, name=name+'_sc')
if memonger:
shortcut._set_attr(mirror_stage='True')
return conv3 + shortcut
else:
bn1 = mx.sym.BatchNorm(data=data, fix_gamma=False, momentum=bn_mom, eps=2e-5, name=name + '_bn1')
act1 = mx.sym.Activation(data=bn1, act_type='relu', name=name + '_relu1')
conv1 = mx.sym.Convolution(data=act1, num_filter=num_filter, kernel=(3,3), stride=stride, pad=(1,1),
no_bias=True, workspace=workspace, name=name + '_conv1')
bn2 = mx.sym.BatchNorm(data=conv1, fix_gamma=False, momentum=bn_mom, eps=2e-5, name=name + '_bn2')
act2 = mx.sym.Activation(data=bn2, act_type='relu', name=name + '_relu2')
conv2 = mx.sym.Convolution(data=act2, num_filter=num_filter, kernel=(3,3), stride=(1,1), pad=(1,1),
no_bias=True, workspace=workspace, name=name + '_conv2')
if dim_match:
shortcut = data
else:
shortcut = mx.sym.Convolution(data=act1, num_filter=num_filter, kernel=(1,1), stride=stride, no_bias=True,
workspace=workspace, name=name+'_sc')
if memonger:
shortcut._set_attr(mirror_stage='True')
return conv2 + shortcut
def resnet(units, num_stages, filter_list, num_classes, image_shape, bottle_neck=True, bn_mom=0.9, workspace=256, memonger=False):
"""Return ResNet symbol of
Parameters
----------
units : list
Number of units in each stage
num_stages : int
Number of stage
filter_list : list
Channel size of each stage
num_classes : int
Output size of symbol
image_shape : tuple
Shape of the input image; only cifar10-style and imagenet-style inputs are supported
workspace : int
Workspace used in convolution operator
"""
num_unit = len(units)
assert(num_unit == num_stages)
data = mx.sym.Variable(name='data')
data = mx.sym.identity(data=data, name='id')
data = mx.sym.BatchNorm(data=data, fix_gamma=True, eps=2e-5, momentum=bn_mom, name='bn_data')
(nchannel, height, width) = image_shape
if height <= 32: # such as cifar10
body = mx.sym.Convolution(data=data, num_filter=filter_list[0], kernel=(3, 3), stride=(1,1), pad=(1, 1),
no_bias=True, name="conv0", workspace=workspace)
else: # often expected to be 224 such as imagenet
body = mx.sym.Convolution(data=data, num_filter=filter_list[0], kernel=(7, 7), stride=(2,2), pad=(3, 3),
no_bias=True, name="conv0", workspace=workspace)
body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn0')
body = mx.sym.Activation(data=body, act_type='relu', name='relu0')
body = mx.symbol.Pooling(data=body, kernel=(3, 3), stride=(2,2), pad=(1,1), pool_type='max')
for i in range(num_stages):
body = residual_unit(body, filter_list[i+1], (1 if i==0 else 2, 1 if i==0 else 2), False,
name='stage%d_unit%d' % (i + 1, 1), bottle_neck=bottle_neck, workspace=workspace,
memonger=memonger)
for j in range(units[i]-1):
body = residual_unit(body, filter_list[i+1], (1,1), True, name='stage%d_unit%d' % (i + 1, j + 2),
bottle_neck=bottle_neck, workspace=workspace, memonger=memonger)
bn1 = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn1')
relu1 = mx.sym.Activation(data=bn1, act_type='relu', name='relu1')
# Although the kernel size is ignored when global_pool=True, one must still be specified
pool1 = mx.symbol.Pooling(data=relu1, global_pool=True, kernel=(7, 7), pool_type='avg', name='pool1')
flat = mx.symbol.Flatten(data=pool1)
fc1 = mx.symbol.FullyConnected(data=flat, num_hidden=num_classes, name='fc1')
return mx.symbol.SoftmaxOutput(data=fc1, name='softmax')
def get_symbol(num_classes, num_layers, image_shape, conv_workspace=256, **kwargs):
"""
Adapted from https://github.com/tornadomeet/ResNet/blob/master/train_resnet.py
Original author Wei Wu
"""
image_shape = [int(l) for l in image_shape.split(',')]
(nchannel, height, width) = image_shape
if height <= 28:
num_stages = 3
if (num_layers-2) % 9 == 0 and num_layers >= 164:
per_unit = [(num_layers-2)//9]
filter_list = [16, 64, 128, 256]
bottle_neck = True
elif (num_layers-2) % 6 == 0 and num_layers < 164:
per_unit = [(num_layers-2)//6]
filter_list = [16, 16, 32, 64]
bottle_neck = False
else:
raise ValueError("no experiments done on num_layers {}, you can do it yourself".format(num_layers))
units = per_unit * num_stages
else:
if num_layers >= 50:
filter_list = [64, 256, 512, 1024, 2048]
bottle_neck = True
else:
filter_list = [64, 64, 128, 256, 512]
bottle_neck = False
num_stages = 4
if num_layers == 18:
units = [2, 2, 2, 2]
elif num_layers == 34:
units = [3, 4, 6, 3]
elif num_layers == 50:
units = [3, 4, 6, 3]
elif num_layers == 101:
units = [3, 4, 23, 3]
elif num_layers == 152:
units = [3, 8, 36, 3]
elif num_layers == 200:
units = [3, 24, 36, 3]
elif num_layers == 269:
units = [3, 30, 48, 8]
else:
raise ValueError("no experiments done on num_layers {}, you can do it yourself".format(num_layers))
return resnet(units = units,
num_stages = num_stages,
filter_list = filter_list,
num_classes = num_classes,
image_shape = image_shape,
bottle_neck = bottle_neck,
workspace = conv_workspace)
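# Illustrative usage (a sketch, not part of the original file):
# get_symbol(num_classes=10, num_layers=110, image_shape='3,28,28') builds a
# CIFAR-style ResNet-110 (non-bottleneck, units=[18, 18, 18]), while
# get_symbol(num_classes=1000, num_layers=50, image_shape='3,224,224') builds
# a bottleneck ResNet-50 for ImageNet-sized inputs.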
|
virtualopensystems/neutron | refs/heads/master | neutron/plugins/bigswitch/servermanager.py | 1 | # Copyright 2014 Big Switch Networks, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Mandeep Dhami, Big Switch Networks, Inc.
# @author: Sumit Naiksatam, sumitnaiksatam@gmail.com, Big Switch Networks, Inc.
# @author: Kevin Benton, Big Switch Networks, Inc.
"""
This module manages the HTTP and HTTPS connections to the backend controllers.
The main class it provides for external use is ServerPool which manages a set
of ServerProxy objects that correspond to individual backend controllers.
The following functionality is handled by this module:
- Translation of rest_* function calls to HTTP/HTTPS calls to the controllers
- Automatic failover between controllers
- SSL Certificate enforcement
- HTTP Authentication
"""
import base64
import httplib
import os
import socket
import ssl
import weakref
import eventlet
import eventlet.corolocal
from oslo.config import cfg
from neutron.common import exceptions
from neutron.common import utils
from neutron.openstack.common import excutils
from neutron.openstack.common import jsonutils as json
from neutron.openstack.common import log as logging
from neutron.plugins.bigswitch.db import consistency_db as cdb
LOG = logging.getLogger(__name__)
# The following are used to invoke the API on the external controller
CAPABILITIES_PATH = "/capabilities"
NET_RESOURCE_PATH = "/tenants/%s/networks"
PORT_RESOURCE_PATH = "/tenants/%s/networks/%s/ports"
ROUTER_RESOURCE_PATH = "/tenants/%s/routers"
ROUTER_INTF_OP_PATH = "/tenants/%s/routers/%s/interfaces"
NETWORKS_PATH = "/tenants/%s/networks/%s"
FLOATINGIPS_PATH = "/tenants/%s/floatingips/%s"
PORTS_PATH = "/tenants/%s/networks/%s/ports/%s"
ATTACHMENT_PATH = "/tenants/%s/networks/%s/ports/%s/attachment"
ROUTERS_PATH = "/tenants/%s/routers/%s"
ROUTER_INTF_PATH = "/tenants/%s/routers/%s/interfaces/%s"
TOPOLOGY_PATH = "/topology"
HEALTH_PATH = "/health"
SUCCESS_CODES = range(200, 207)
FAILURE_CODES = [0, 301, 302, 303, 400, 401, 403, 404, 500, 501, 502, 503,
504, 505]
BASE_URI = '/networkService/v1.1'
ORCHESTRATION_SERVICE_ID = 'Neutron v2.0'
HASH_MATCH_HEADER = 'X-BSN-BVS-HASH-MATCH'
# error messages
NXNETWORK = 'NXVNS'
class RemoteRestError(exceptions.NeutronException):
message = _("Error in REST call to remote network "
"controller: %(reason)s")
status = None
def __init__(self, **kwargs):
self.status = kwargs.pop('status', None)
self.reason = kwargs.get('reason')
super(RemoteRestError, self).__init__(**kwargs)
class ServerProxy(object):
"""REST server proxy to a network controller."""
def __init__(self, server, port, ssl, auth, neutron_id, timeout,
base_uri, name, mypool, combined_cert):
self.server = server
self.port = port
self.ssl = ssl
self.base_uri = base_uri
self.timeout = timeout
self.name = name
self.success_codes = SUCCESS_CODES
self.auth = None
self.neutron_id = neutron_id
self.failed = False
self.capabilities = []
# enable server to reference parent pool
self.mypool = mypool
# cache connection here to avoid a SSL handshake for every connection
self.currentconn = None
if auth:
self.auth = 'Basic ' + base64.encodestring(auth).strip()
self.combined_cert = combined_cert
def get_capabilities(self):
try:
body = self.rest_call('GET', CAPABILITIES_PATH)[2]
self.capabilities = json.loads(body)
except Exception:
LOG.exception(_("Couldn't retrieve capabilities. "
"Newer API calls won't be supported."))
LOG.info(_("The following capabilities were received "
"for %(server)s: %(cap)s"), {'server': self.server,
'cap': self.capabilities})
return self.capabilities
def rest_call(self, action, resource, data='', headers={}, timeout=False,
reconnect=False, hash_handler=None):
uri = self.base_uri + resource
body = json.dumps(data)
if not headers:
headers = {}
headers['Content-type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['NeutronProxy-Agent'] = self.name
headers['Instance-ID'] = self.neutron_id
headers['Orchestration-Service-ID'] = ORCHESTRATION_SERVICE_ID
if hash_handler:
# this will be excluded on calls that don't need hashes
# (e.g. topology sync, capability checks)
headers[HASH_MATCH_HEADER] = hash_handler.read_for_update()
else:
hash_handler = cdb.HashHandler()
if 'keep-alive' in self.capabilities:
headers['Connection'] = 'keep-alive'
else:
reconnect = True
if self.auth:
headers['Authorization'] = self.auth
LOG.debug(_("ServerProxy: server=%(server)s, port=%(port)d, "
"ssl=%(ssl)r"),
{'server': self.server, 'port': self.port, 'ssl': self.ssl})
LOG.debug(_("ServerProxy: resource=%(resource)s, data=%(data)r, "
"headers=%(headers)r, action=%(action)s"),
{'resource': resource, 'data': data, 'headers': headers,
'action': action})
# unspecified timeout is False because a timeout can be specified as
# None to indicate no timeout.
if timeout is False:
timeout = self.timeout
if timeout != self.timeout:
# need a new connection if timeout has changed
reconnect = True
if not self.currentconn or reconnect:
if self.currentconn:
self.currentconn.close()
if self.ssl:
self.currentconn = HTTPSConnectionWithValidation(
self.server, self.port, timeout=timeout)
if self.currentconn is None:
LOG.error(_('ServerProxy: Could not establish HTTPS '
'connection'))
return 0, None, None, None
self.currentconn.combined_cert = self.combined_cert
else:
self.currentconn = httplib.HTTPConnection(
self.server, self.port, timeout=timeout)
if self.currentconn is None:
LOG.error(_('ServerProxy: Could not establish HTTP '
'connection'))
return 0, None, None, None
try:
self.currentconn.request(action, uri, body, headers)
response = self.currentconn.getresponse()
respstr = response.read()
respdata = respstr
if response.status in self.success_codes:
hash_value = response.getheader(HASH_MATCH_HEADER)
# don't clear hash from DB if a hash header wasn't present
if hash_value is not None:
hash_handler.put_hash(hash_value)
try:
respdata = json.loads(respstr)
except ValueError:
# response was not JSON, ignore the exception
pass
else:
hash_handler.close_update_session()
ret = (response.status, response.reason, respstr, respdata)
except httplib.HTTPException:
# If we were using a cached connection, try again with a new one.
with excutils.save_and_reraise_exception() as ctxt:
self.currentconn.close()
if reconnect:
# if reconnect is true, this was on a fresh connection so
# reraise since this server seems to be broken
ctxt.reraise = True
else:
# if reconnect is false, it was a cached connection so
# try one more time before re-raising
ctxt.reraise = False
return self.rest_call(action, resource, data, headers,
timeout=timeout, reconnect=True)
except (socket.timeout, socket.error) as e:
self.currentconn.close()
LOG.error(_('ServerProxy: %(action)s failure, %(e)r'),
{'action': action, 'e': e})
ret = 0, None, None, None
LOG.debug(_("ServerProxy: status=%(status)d, reason=%(reason)r, "
"ret=%(ret)s, data=%(data)r"), {'status': ret[0],
'reason': ret[1],
'ret': ret[2],
'data': ret[3]})
return ret
class ServerPool(object):
def __init__(self, timeout=False,
base_uri=BASE_URI, name='NeutronRestProxy'):
LOG.debug(_("ServerPool: initializing"))
# 'servers' is the list of network controller REST end-points
# (used in order specified till one succeeds, and it is sticky
# till next failure). Use 'server_auth' to encode api-key
servers = cfg.CONF.RESTPROXY.servers
self.auth = cfg.CONF.RESTPROXY.server_auth
self.ssl = cfg.CONF.RESTPROXY.server_ssl
self.neutron_id = cfg.CONF.RESTPROXY.neutron_id
self.base_uri = base_uri
self.name = name
self.contexts = {}
self.timeout = cfg.CONF.RESTPROXY.server_timeout
self.always_reconnect = not cfg.CONF.RESTPROXY.cache_connections
default_port = 8000
if timeout is not False:
self.timeout = timeout
# Function to use to retrieve topology for consistency syncs.
# Needs to be set by module that uses the servermanager.
self.get_topo_function = None
self.get_topo_function_args = {}
if not servers:
raise cfg.Error(_('Servers not defined. Aborting server manager.'))
servers = [s if len(s.rsplit(':', 1)) == 2
else "%s:%d" % (s, default_port)
for s in servers]
if any((len(spl) != 2 or not spl[1].isdigit())
for spl in [sp.rsplit(':', 1)
for sp in servers]):
raise cfg.Error(_('Servers must be defined as <ip>:<port>. '
'Configuration was %s') % servers)
self.servers = [
self.server_proxy_for(server, int(port))
for server, port in (s.rsplit(':', 1) for s in servers)
]
eventlet.spawn(self._consistency_watchdog,
cfg.CONF.RESTPROXY.consistency_interval)
LOG.debug(_("ServerPool: initialization done"))
def set_context(self, context):
# this context needs to be local to the greenthread
# so concurrent requests don't use the wrong context.
# Use a weakref so the context is garbage collected
# after the plugin is done with it.
ref = weakref.ref(context)
self.contexts[eventlet.corolocal.get_ident()] = ref
def get_context_ref(self):
# Try to get the context cached for this thread. If one
# doesn't exist or if it's been garbage collected, this will
# just return None.
try:
return self.contexts[eventlet.corolocal.get_ident()]()
except KeyError:
return None
def get_capabilities(self):
# lookup on first try
try:
return self.capabilities
except AttributeError:
# each server should return a list of capabilities it supports
# e.g. ['floatingip']
capabilities = [set(server.get_capabilities())
for server in self.servers]
# Pool only supports what all of the servers support
self.capabilities = set.intersection(*capabilities)
return self.capabilities
def server_proxy_for(self, server, port):
combined_cert = self._get_combined_cert_for_server(server, port)
return ServerProxy(server, port, self.ssl, self.auth, self.neutron_id,
self.timeout, self.base_uri, self.name, mypool=self,
combined_cert=combined_cert)
def _get_combined_cert_for_server(self, server, port):
# The ssl library requires a combined file with all trusted certs
# so we make one containing the trusted CAs and the corresponding
# host cert for this server
combined_cert = None
if self.ssl and not cfg.CONF.RESTPROXY.no_ssl_validation:
base_ssl = cfg.CONF.RESTPROXY.ssl_cert_directory
host_dir = os.path.join(base_ssl, 'host_certs')
ca_dir = os.path.join(base_ssl, 'ca_certs')
combined_dir = os.path.join(base_ssl, 'combined')
combined_cert = os.path.join(combined_dir, '%s.pem' % server)
if not os.path.exists(base_ssl):
raise cfg.Error(_('ssl_cert_directory [%s] does not exist. '
'Create it or disable ssl.') % base_ssl)
for automake in [combined_dir, ca_dir, host_dir]:
if not os.path.exists(automake):
os.makedirs(automake)
# get all CA certs
certs = self._get_ca_cert_paths(ca_dir)
# check for a host specific cert
hcert, exists = self._get_host_cert_path(host_dir, server)
if exists:
certs.append(hcert)
elif cfg.CONF.RESTPROXY.ssl_sticky:
self._fetch_and_store_cert(server, port, hcert)
certs.append(hcert)
if not certs:
raise cfg.Error(_('No certificates were found to verify '
'controller %s') % (server))
self._combine_certs_to_file(certs, combined_cert)
return combined_cert
def _combine_certs_to_file(self, certs, cfile):
'''
Concatenates the contents of each certificate in a list of
certificate paths to one combined location for use with ssl
sockets.
'''
with open(cfile, 'w') as combined:
for c in certs:
with open(c, 'r') as cert_handle:
combined.write(cert_handle.read())
def _get_host_cert_path(self, host_dir, server):
'''
Returns the full host-certificate path and a boolean indicating whether it exists.
'''
hcert = os.path.join(host_dir, '%s.pem' % server)
if os.path.exists(hcert):
return hcert, True
return hcert, False
def _get_ca_cert_paths(self, ca_dir):
certs = [os.path.join(root, name)
for name in [
name for (root, dirs, files) in os.walk(ca_dir)
for name in files
]
if name.endswith('.pem')]
return certs
def _fetch_and_store_cert(self, server, port, path):
'''
Grabs a certificate from a server and writes it to
a given path.
'''
try:
cert = ssl.get_server_certificate((server, port))
except Exception as e:
raise cfg.Error(_('Could not retrieve initial '
'certificate from controller %(server)s. '
'Error details: %(error)s') %
{'server': server, 'error': str(e)})
LOG.warning(_("Storing to certificate for host %(server)s "
"at %(path)s") % {'server': server,
'path': path})
self._file_put_contents(path, cert)
return cert
def _file_put_contents(self, path, contents):
# Simple method to write to file.
# Created for easy Mocking
with open(path, 'w') as handle:
handle.write(contents)
def server_failure(self, resp, ignore_codes=[]):
"""Define failure codes as required.
Note: We assume 301-303 are failures, and try the next server in
the server pool.
"""
return (resp[0] in FAILURE_CODES and resp[0] not in ignore_codes)
def action_success(self, resp):
"""Defining success codes as required.
Note: We assume any valid 2xx as being successful response.
"""
return resp[0] in SUCCESS_CODES
@utils.synchronized('bsn-rest-call')
def rest_call(self, action, resource, data, headers, ignore_codes,
timeout=False):
hash_handler = cdb.HashHandler(context=self.get_context_ref())
good_first = sorted(self.servers, key=lambda x: x.failed)
first_response = None
for active_server in good_first:
ret = active_server.rest_call(action, resource, data, headers,
timeout,
reconnect=self.always_reconnect,
hash_handler=hash_handler)
# If inconsistent, do a full synchronization
if ret[0] == httplib.CONFLICT:
if not self.get_topo_function:
raise cfg.Error(_('Server requires synchronization, '
'but no topology function was defined.'))
# The hash was incorrect so it needs to be removed
hash_handler.put_hash('')
data = self.get_topo_function(**self.get_topo_function_args)
active_server.rest_call('PUT', TOPOLOGY_PATH, data,
timeout=None)
# Store the first response as the error to be bubbled up to the
# user since it was a good server. Subsequent servers will most
# likely be cluster slaves and won't have a useful error for the
# user (e.g. 302 redirect to master)
if not first_response:
first_response = ret
if not self.server_failure(ret, ignore_codes):
active_server.failed = False
return ret
else:
LOG.error(_('ServerProxy: %(action)s failure for servers: '
'%(server)r Response: %(response)s'),
{'action': action,
'server': (active_server.server,
active_server.port),
'response': ret[3]})
LOG.error(_("ServerProxy: Error details: status=%(status)d, "
"reason=%(reason)r, ret=%(ret)s, data=%(data)r"),
{'status': ret[0], 'reason': ret[1], 'ret': ret[2],
'data': ret[3]})
active_server.failed = True
# All servers failed, reset server list and try again next time
LOG.error(_('ServerProxy: %(action)s failure for all servers: '
'%(server)r'),
{'action': action,
'server': tuple((s.server,
s.port) for s in self.servers)})
return first_response
def rest_action(self, action, resource, data='', errstr='%s',
ignore_codes=[], headers={}, timeout=False):
"""
Wrapper for rest_call that verifies success and raises a
RemoteRestError on failure with a provided error string
By default, 404 errors on DELETE calls are ignored because
they already do not exist on the backend.
"""
if not ignore_codes and action == 'DELETE':
ignore_codes = [404]
resp = self.rest_call(action, resource, data, headers, ignore_codes,
timeout)
if self.server_failure(resp, ignore_codes):
LOG.error(errstr, resp[2])
raise RemoteRestError(reason=resp[2], status=resp[0])
if resp[0] in ignore_codes:
LOG.warning(_("NeutronRestProxyV2: Received and ignored error "
"code %(code)s on %(action)s action to resource "
"%(resource)s"),
{'code': resp[0], 'action': action,
'resource': resource})
return resp
def rest_create_router(self, tenant_id, router):
resource = ROUTER_RESOURCE_PATH % tenant_id
data = {"router": router}
errstr = _("Unable to create remote router: %s")
self.rest_action('POST', resource, data, errstr)
def rest_update_router(self, tenant_id, router, router_id):
resource = ROUTERS_PATH % (tenant_id, router_id)
data = {"router": router}
errstr = _("Unable to update remote router: %s")
self.rest_action('PUT', resource, data, errstr)
def rest_delete_router(self, tenant_id, router_id):
resource = ROUTERS_PATH % (tenant_id, router_id)
errstr = _("Unable to delete remote router: %s")
self.rest_action('DELETE', resource, errstr=errstr)
def rest_add_router_interface(self, tenant_id, router_id, intf_details):
resource = ROUTER_INTF_OP_PATH % (tenant_id, router_id)
data = {"interface": intf_details}
errstr = _("Unable to add router interface: %s")
self.rest_action('POST', resource, data, errstr)
def rest_remove_router_interface(self, tenant_id, router_id, interface_id):
resource = ROUTER_INTF_PATH % (tenant_id, router_id, interface_id)
errstr = _("Unable to delete remote intf: %s")
self.rest_action('DELETE', resource, errstr=errstr)
def rest_create_network(self, tenant_id, network):
resource = NET_RESOURCE_PATH % tenant_id
data = {"network": network}
errstr = _("Unable to create remote network: %s")
self.rest_action('POST', resource, data, errstr)
def rest_update_network(self, tenant_id, net_id, network):
resource = NETWORKS_PATH % (tenant_id, net_id)
data = {"network": network}
errstr = _("Unable to update remote network: %s")
self.rest_action('PUT', resource, data, errstr)
def rest_delete_network(self, tenant_id, net_id):
resource = NETWORKS_PATH % (tenant_id, net_id)
errstr = _("Unable to update remote network: %s")
self.rest_action('DELETE', resource, errstr=errstr)
def rest_create_port(self, tenant_id, net_id, port):
resource = ATTACHMENT_PATH % (tenant_id, net_id, port["id"])
data = {"port": port}
device_id = port.get("device_id")
if not port["mac_address"] or not device_id:
# controller only cares about ports attached to devices
LOG.warning(_("No device MAC attached to port %s. "
"Skipping notification to controller."), port["id"])
return
data["attachment"] = {"id": device_id,
"mac": port["mac_address"]}
errstr = _("Unable to create remote port: %s")
self.rest_action('PUT', resource, data, errstr)
def rest_delete_port(self, tenant_id, network_id, port_id):
resource = ATTACHMENT_PATH % (tenant_id, network_id, port_id)
errstr = _("Unable to delete remote port: %s")
self.rest_action('DELETE', resource, errstr=errstr)
def rest_update_port(self, tenant_id, net_id, port):
# Controller has no update operation for the port endpoint
# the create PUT method will replace
self.rest_create_port(tenant_id, net_id, port)
def rest_create_floatingip(self, tenant_id, floatingip):
resource = FLOATINGIPS_PATH % (tenant_id, floatingip['id'])
errstr = _("Unable to create floating IP: %s")
self.rest_action('PUT', resource, floatingip, errstr=errstr)
def rest_update_floatingip(self, tenant_id, floatingip, oldid):
resource = FLOATINGIPS_PATH % (tenant_id, oldid)
errstr = _("Unable to update floating IP: %s")
self.rest_action('PUT', resource, floatingip, errstr=errstr)
def rest_delete_floatingip(self, tenant_id, oldid):
resource = FLOATINGIPS_PATH % (tenant_id, oldid)
errstr = _("Unable to delete floating IP: %s")
self.rest_action('DELETE', resource, errstr=errstr)
def _consistency_watchdog(self, polling_interval=60):
if 'consistency' not in self.get_capabilities():
LOG.warning(_("Backend server(s) do not support automated "
"consitency checks."))
return
if not polling_interval:
LOG.warning(_("Consistency watchdog disabled by polling interval "
"setting of %s."), polling_interval)
return
while True:
# If consistency is supported, all we have to do is make any
# rest call and the consistency header will be added. If it
# doesn't match, the backend will return a synchronization error
# that will be handled by the rest_action.
eventlet.sleep(polling_interval)
try:
self.rest_action('GET', HEALTH_PATH)
except Exception:
LOG.exception(_("Encountered an error checking controller "
"health."))
class HTTPSConnectionWithValidation(httplib.HTTPSConnection):
# If combined_cert is None, the connection will continue without
# any certificate validation.
combined_cert = None
def connect(self):
try:
sock = socket.create_connection((self.host, self.port),
self.timeout, self.source_address)
except AttributeError:
# python 2.6 doesn't have the source_address attribute
sock = socket.create_connection((self.host, self.port),
self.timeout)
if self._tunnel_host:
self.sock = sock
self._tunnel()
if self.combined_cert:
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=self.combined_cert)
else:
self.sock = ssl.wrap_socket(sock, self.key_file,
self.cert_file,
cert_reqs=ssl.CERT_NONE)
|
shahar-stratoscale/nova | refs/heads/master | nova/api/openstack/compute/contrib/cloudpipe_update.py | 13 | # Copyright 2012 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob.exc
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import db
from nova.openstack.common.gettextutils import _
authorize = extensions.extension_authorizer('compute', 'cloudpipe_update')
class CloudpipeUpdateController(wsgi.Controller):
"""Handle updating the vpn ip/port for cloudpipe instances."""
def __init__(self):
super(CloudpipeUpdateController, self).__init__()
@wsgi.action("update")
def update(self, req, id, body):
"""Configure cloudpipe parameters for the project."""
context = req.environ['nova.context']
authorize(context)
if id != "configure-project":
msg = _("Unknown action %s") % id
raise webob.exc.HTTPBadRequest(explanation=msg)
project_id = context.project_id
try:
params = body['configure_project']
vpn_ip = params['vpn_ip']
vpn_port = params['vpn_port']
except (TypeError, KeyError):
raise webob.exc.HTTPUnprocessableEntity()
networks = db.project_get_networks(context, project_id)
for network in networks:
db.network_update(context, network['id'],
{'vpn_public_address': vpn_ip,
'vpn_public_port': int(vpn_port)})
return webob.exc.HTTPAccepted()
class Cloudpipe_update(extensions.ExtensionDescriptor):
"""Adds the ability to set the vpn ip/port for cloudpipe instances."""
name = "CloudpipeUpdate"
alias = "os-cloudpipe-update"
namespace = "http://docs.openstack.org/compute/ext/cloudpipe-update/api/v2"
updated = "2012-11-14T00:00:00+00:00"
def get_controller_extensions(self):
controller = CloudpipeUpdateController()
extension = extensions.ControllerExtension(self, 'os-cloudpipe',
controller)
return [extension]
|
Rentier/setlx2py | refs/heads/master | setlx2py/builtin/setlx_functions.py | 1 | #------------------------------------------------------------------------------
# setlx2py: setlx_functions.py
#
# Predefined functions of SetlX
#
# Copyright (C) 2014, Jan-Christoph Klie
# License: Apache v2
#------------------------------------------------------------------------------
import math
import functools
import operator
import random
import sys
import itertools as it
import collections
from setlx2py.builtin.setlx_set import SetlxSet
from setlx2py.builtin.setlx_string import SetlxString
from setlx2py.builtin.setlx_list import SetlxList
# Functions and Operators on Sets and Lists
# =========================================
#5
def stlx_sum(s):
if len(s) == 0: return 0
return functools.reduce(operator.add, s)
#7
def stlx_arb(m):
"""The function arb(s) picks an arbitrary element from the sequence s. The argument s
can either be a set, a list, or a string. """
return random.sample(m, 1)[0]
#8
def stlx_collect(m):
d = collections.defaultdict(int)
for x in m:
d[x] += 1
return SetlxSet( [ SetlxList( [k, c]) for k, c in d.iteritems()])
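# Example (illustrative): stlx_collect([1, 1, 2]) pairs each distinct element
# with its multiplicity, yielding the set { [1, 2], [2, 1] }.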
#9
def stlx_first(s):
return next(iter(s))
#10
def stlx_last(s):
return s[-1]
#11
def stlx_from(m):
"""The function from(s) picks an arbitrary element from the sequence s. The argument
s can either be a set, a list, or a string. This element is removed from s and returned. This
function returns the same element as the function arb discussed previously.
"""
n = stlx_arb(m)
m.remove(n)
return n
#12
def stlx_fromB(s):
e = stlx_first(s)
s.remove(e)
return e
#13
def stlx_fromE(s):
e = stlx_last(s)
del s[-1]
return e
#14
def stlx_domain(s):
""""""
lst = [x for x, unused in s]
return SetlxSet(lst)
#17
def stlx_powerset(s):
"""If s is a set, the expression pow(s) computes the power set of s. The power set of s is
defined as the set of all subsets of s."""
def powerset_generator(i):
for subset in it.chain.from_iterable(it.combinations(i, r) for r in range(len(i)+1)):
yield set(subset)
return SetlxSet(SetlxSet(z) for z in powerset_generator(s))
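# Example (illustrative): stlx_powerset(SetlxSet([1, 2])) yields the four
# subsets {}, {1}, {2} and {1, 2}.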
#18
def stlx_range(s):
""" If r is a binary relation, then the equality
range(r) = { y : [x,y] in r }
holds.
"""
lst = [y for unused, y in s]
return SetlxSet(lst)
# Mathematical Functions
# =========================================
stlx_sin = math.sin
stlx_cos = math.cos
stlx_tan = math.tan
stlx_asin = math.asin
stlx_acos = math.acos
stlx_atan = math.atan
#==========================================
def stlx_pow(a,b=None):
if b is None:
return stlx_powerset(a)
elif isinstance(a, SetlxSet) and b == 2:
return stlx_cartesian(a,a)
elif isinstance(b, SetlxSet) and a == 2:
return stlx_powerset(b)
else:
return a ** b
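# Illustrative dispatch examples (derived from the branches above):
# stlx_pow(s) on a set s returns its power set, stlx_pow(s, 2) returns the
# cartesian product s x s, stlx_pow(2, s) again returns the power set, and
# stlx_pow(3, 4) falls through to ordinary exponentiation, 3 ** 4 == 81.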
def stlx_print(*args):
s = ''.join(str(arg) for arg in args) + '\n'
sys.stdout.write(s)
def stlx_lst_from_range(*args):
"""
[a..z]
[a,b..z]
"""
if not len(args) in [2,3]:
raise Exception("stlx_range needs two or three arguments!")
if len(args) == 2:
a, z = args
s = 1
comp = operator.le
else:
a, b, z = args
s = b - a
comp = operator.le if a < b else operator.ge
n = a
while comp(n, z):
yield n
n += s
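# Example (illustrative): list(stlx_lst_from_range(1, 9)) == [1, 2, ..., 9]
# and list(stlx_lst_from_range(1, 3, 9)) == [1, 3, 5, 7, 9], mirroring the
# SetlX range literals [1..9] and [1,3..9].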
stlx_len = len
stlx_zip = zip
stlx_char = chr
stlx_is_string = lambda x : isinstance(x, basestring)
# Math
# ----
stlx_factorial = math.factorial
# Logic
# -----
def stlx_implies(p,q):
return not(p) or q
def stlx_equivalent(p,q):
return p == q
def stlx_antivalent(p,q):
return not stlx_equivalent(p,q)
# Sets
# ----
def stlx_cartesian(*args):
return SetlxSet(it.product(*args))
def stlx_product(factors):
return functools.reduce(operator.mul, factors, 1)
# Misc
# ----
def stlx_abort(s):
raise Exception(s)
def is_builtin_function(name):
return name in [ 'print', 'from', 'arb', 'pow', 'char', 'isString', 'abort', 'cos', 'powerset', 'domain', 'range' ]
|
molotof/infernal-twin | refs/heads/master | build/pillow/PIL/PcfFontFile.py | 72 | #
# THIS IS WORK IN PROGRESS
#
# The Python Imaging Library
# $Id$
#
# portable compiled font file parser
#
# history:
# 1997-08-19 fl created
# 2003-09-13 fl fixed loading of unicode fonts
#
# Copyright (c) 1997-2003 by Secret Labs AB.
# Copyright (c) 1997-2003 by Fredrik Lundh.
#
# See the README file for information on usage and redistribution.
#
from PIL import Image
from PIL import FontFile
from PIL import _binary
# --------------------------------------------------------------------
# declarations
PCF_MAGIC = 0x70636601 # "\x01fcp"
PCF_PROPERTIES = (1 << 0)
PCF_ACCELERATORS = (1 << 1)
PCF_METRICS = (1 << 2)
PCF_BITMAPS = (1 << 3)
PCF_INK_METRICS = (1 << 4)
PCF_BDF_ENCODINGS = (1 << 5)
PCF_SWIDTHS = (1 << 6)
PCF_GLYPH_NAMES = (1 << 7)
PCF_BDF_ACCELERATORS = (1 << 8)
BYTES_PER_ROW = [
lambda bits: ((bits+7) >> 3),
lambda bits: ((bits+15) >> 3) & ~1,
lambda bits: ((bits+31) >> 3) & ~3,
lambda bits: ((bits+63) >> 3) & ~7,
]
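# Each entry rounds a row of `bits` pixels up to the next 1-, 2-, 4- or 8-byte
# boundary, matching the four row-padding formats a PCF file may declare.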
i8 = _binary.i8
l16 = _binary.i16le
l32 = _binary.i32le
b16 = _binary.i16be
b32 = _binary.i32be
def sz(s, o):
return s[o:s.index(b"\0", o)]
##
# Font file plugin for the X11 PCF format.
class PcfFontFile(FontFile.FontFile):
name = "name"
def __init__(self, fp):
magic = l32(fp.read(4))
if magic != PCF_MAGIC:
raise SyntaxError("not a PCF file")
FontFile.FontFile.__init__(self)
count = l32(fp.read(4))
self.toc = {}
for i in range(count):
type = l32(fp.read(4))
self.toc[type] = l32(fp.read(4)), l32(fp.read(4)), l32(fp.read(4))
self.fp = fp
self.info = self._load_properties()
metrics = self._load_metrics()
bitmaps = self._load_bitmaps(metrics)
encoding = self._load_encoding()
#
# create glyph structure
for ch in range(256):
ix = encoding[ch]
if ix is not None:
x, y, l, r, w, a, d, f = metrics[ix]
glyph = (w, 0), (l, d-y, x+l, d), (0, 0, x, y), bitmaps[ix]
self.glyph[ch] = glyph
def _getformat(self, tag):
format, size, offset = self.toc[tag]
fp = self.fp
fp.seek(offset)
format = l32(fp.read(4))
if format & 4:
i16, i32 = b16, b32
else:
i16, i32 = l16, l32
return fp, format, i16, i32
def _load_properties(self):
#
# font properties
properties = {}
fp, format, i16, i32 = self._getformat(PCF_PROPERTIES)
nprops = i32(fp.read(4))
# read property description
p = []
for i in range(nprops):
p.append((i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4))))
if nprops & 3:
fp.seek(4 - (nprops & 3), 1) # pad
data = fp.read(i32(fp.read(4)))
for k, s, v in p:
k = sz(data, k)
if s:
v = sz(data, v)
properties[k] = v
return properties
def _load_metrics(self):
#
# font metrics
metrics = []
fp, format, i16, i32 = self._getformat(PCF_METRICS)
append = metrics.append
if (format & 0xff00) == 0x100:
# "compressed" metrics
for i in range(i16(fp.read(2))):
left = i8(fp.read(1)) - 128
right = i8(fp.read(1)) - 128
width = i8(fp.read(1)) - 128
ascent = i8(fp.read(1)) - 128
descent = i8(fp.read(1)) - 128
xsize = right - left
ysize = ascent + descent
append(
(xsize, ysize, left, right, width,
ascent, descent, 0)
)
else:
# "jumbo" metrics
for i in range(i32(fp.read(4))):
left = i16(fp.read(2))
right = i16(fp.read(2))
width = i16(fp.read(2))
ascent = i16(fp.read(2))
descent = i16(fp.read(2))
attributes = i16(fp.read(2))
xsize = right - left
ysize = ascent + descent
append(
(xsize, ysize, left, right, width,
ascent, descent, attributes)
)
return metrics
def _load_bitmaps(self, metrics):
#
# bitmap data
bitmaps = []
fp, format, i16, i32 = self._getformat(PCF_BITMAPS)
nbitmaps = i32(fp.read(4))
if nbitmaps != len(metrics):
raise IOError("Wrong number of bitmaps")
offsets = []
for i in range(nbitmaps):
offsets.append(i32(fp.read(4)))
bitmapSizes = []
for i in range(4):
bitmapSizes.append(i32(fp.read(4)))
# byteorder = format & 4 # non-zero => MSB
bitorder = format & 8 # non-zero => MSB
padindex = format & 3
bitmapsize = bitmapSizes[padindex]
offsets.append(bitmapsize)
data = fp.read(bitmapsize)
pad = BYTES_PER_ROW[padindex]
mode = "1;R"
if bitorder:
mode = "1"
for i in range(nbitmaps):
x, y, l, r, w, a, d, f = metrics[i]
b, e = offsets[i], offsets[i+1]
bitmaps.append(
Image.frombytes("1", (x, y), data[b:e], "raw", mode, pad(x))
)
return bitmaps
def _load_encoding(self):
# map character code to bitmap index
encoding = [None] * 256
fp, format, i16, i32 = self._getformat(PCF_BDF_ENCODINGS)
firstCol, lastCol = i16(fp.read(2)), i16(fp.read(2))
firstRow, lastRow = i16(fp.read(2)), i16(fp.read(2))
default = i16(fp.read(2))
nencoding = (lastCol - firstCol + 1) * (lastRow - firstRow + 1)
for i in range(nencoding):
encodingOffset = i16(fp.read(2))
if encodingOffset != 0xFFFF:
try:
encoding[i+firstCol] = encodingOffset
except IndexError:
break # only load ISO-8859-1 glyphs
return encoding
|
xhat/micropython | refs/heads/master | tests/basics/string_rpartition.py | 61 | print("asdf".rpartition('g'))
print("asdf".rpartition('a'))
print("asdf".rpartition('s'))
print("asdf".rpartition('f'))
print("asdf".rpartition('d'))
print("asdf".rpartition('asd'))
print("asdf".rpartition('sdf'))
print("asdf".rpartition('as'))
print("asdf".rpartition('df'))
print("asdf".rpartition('asdf'))
print("asdf".rpartition('asdfa'))
print("asdf".rpartition('fasdf'))
print("asdf".rpartition('fasdfa'))
print("abba".rpartition('a'))
print("abba".rpartition('b'))
try:
print("asdf".rpartition(1))
except TypeError:
print("Raised TypeError")
else:
print("Did not raise TypeError")
try:
print("asdf".rpartition(''))
except ValueError:
print("Raised ValueError")
else:
print("Did not raise ValueError")
|
mandeepdhami/nova | refs/heads/master | nova/tests/unit/db/test_sqlalchemy_migration.py | 11 | # Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
import mock
import uuid
from migrate import exceptions as versioning_exceptions
from migrate import UniqueConstraint
from migrate.versioning import api as versioning_api
from oslo_db.sqlalchemy import utils as db_utils
import sqlalchemy
from nova.compute import flavors
from nova import context
from nova.db.sqlalchemy import api as db_api
from nova.db.sqlalchemy import migration
from nova import exception
from nova import objects
from nova import test
class TestNullInstanceUuidScanDB(test.TestCase):
# NOTE(mriedem): Copied from the 267 database migration.
def downgrade(self, migrate_engine):
UniqueConstraint('uuid',
table=db_utils.get_table(migrate_engine, 'instances'),
name='uniq_instances0uuid').drop()
for table_name in ('instances', 'shadow_instances'):
table = db_utils.get_table(migrate_engine, table_name)
table.columns.uuid.alter(nullable=True)
def setUp(self):
super(TestNullInstanceUuidScanDB, self).setUp()
self.engine = db_api.get_engine()
# When this test runs, we've already run the schema migration to make
# instances.uuid non-nullable, so we have to alter the table here
# so we can test against a real database.
self.downgrade(self.engine)
# Now create fake entries in the fixed_ips, consoles and
# instances table where (instance_)uuid is None for testing.
for table_name in ('fixed_ips', 'instances', 'consoles'):
table = db_utils.get_table(self.engine, table_name)
fake_record = {'id': 1}
table.insert().execute(fake_record)
def test_db_null_instance_uuid_scan_readonly(self):
results = migration.db_null_instance_uuid_scan(delete=False)
self.assertEqual(1, results.get('instances'))
self.assertEqual(1, results.get('consoles'))
# The fixed_ips table should be ignored.
self.assertNotIn('fixed_ips', results)
# Now pick a random table with an instance_uuid column and show it's
# in the results but with 0 hits.
self.assertEqual(0, results.get('instance_info_caches'))
# Make sure nothing was deleted.
for table_name in ('fixed_ips', 'instances', 'consoles'):
table = db_utils.get_table(self.engine, table_name)
record = table.select(table.c.id == 1).execute().first()
self.assertIsNotNone(record)
def test_db_null_instance_uuid_scan_delete(self):
results = migration.db_null_instance_uuid_scan(delete=True)
self.assertEqual(1, results.get('instances'))
self.assertEqual(1, results.get('consoles'))
# The fixed_ips table should be ignored.
self.assertNotIn('fixed_ips', results)
# Now pick a random table with an instance_uuid column and show it's
# in the results but with 0 hits.
self.assertEqual(0, results.get('instance_info_caches'))
# Make sure fixed_ips wasn't touched, but instances and instance_faults
# records were deleted.
fixed_ips = db_utils.get_table(self.engine, 'fixed_ips')
record = fixed_ips.select(fixed_ips.c.id == 1).execute().first()
self.assertIsNotNone(record)
consoles = db_utils.get_table(self.engine, 'consoles')
record = consoles.select(consoles.c.id == 1).execute().first()
self.assertIsNone(record)
instances = db_utils.get_table(self.engine, 'instances')
record = instances.select(instances.c.id == 1).execute().first()
self.assertIsNone(record)
@mock.patch.object(migration, 'db_version', return_value=2)
@mock.patch.object(migration, '_find_migrate_repo', return_value='repo')
@mock.patch.object(migration, '_db_sync_locked', return_value=False)
@mock.patch.object(versioning_api, 'upgrade')
@mock.patch.object(versioning_api, 'downgrade')
@mock.patch.object(migration, 'get_engine', return_value='engine')
class TestDbSync(test.NoDBTestCase):
def test_version_none(self, mock_get_engine, mock_downgrade, mock_upgrade,
mock_sync_locked, mock_find_repo, mock_version):
database = 'fake'
migration.db_sync(database=database)
mock_version.assert_called_once_with(database)
mock_find_repo.assert_called_once_with(database)
mock_get_engine.assert_called_once_with(database)
mock_upgrade.assert_called_once_with('engine', 'repo', None)
self.assertFalse(mock_downgrade.called)
def test_downgrade(self, mock_get_engine, mock_downgrade, mock_upgrade,
mock_sync_locked, mock_find_repo, mock_version):
database = 'fake'
migration.db_sync(1, database=database)
mock_version.assert_called_once_with(database)
mock_find_repo.assert_called_once_with(database)
mock_get_engine.assert_called_once_with(database)
mock_downgrade.assert_called_once_with('engine', 'repo', 1)
self.assertFalse(mock_upgrade.called)
@mock.patch.object(migration, '_find_migrate_repo', return_value='repo')
@mock.patch.object(versioning_api, 'db_version')
@mock.patch.object(migration, 'get_engine')
class TestDbVersion(test.NoDBTestCase):
def test_db_version(self, mock_get_engine, mock_db_version,
mock_find_repo):
database = 'fake'
mock_get_engine.return_value = 'engine'
migration.db_version(database)
mock_find_repo.assert_called_once_with(database)
mock_db_version.assert_called_once_with('engine', 'repo')
def test_not_controlled(self, mock_get_engine, mock_db_version,
mock_find_repo):
database = 'api'
mock_get_engine.side_effect = ['engine', 'engine', 'engine']
exc = versioning_exceptions.DatabaseNotControlledError()
mock_db_version.side_effect = [exc, '']
metadata = mock.MagicMock()
metadata.tables.return_value = []
with mock.patch.object(sqlalchemy, 'MetaData',
metadata), mock.patch.object(migration,
'db_version_control') as mock_version_control:
migration.db_version(database)
mock_version_control.assert_called_once_with(0, database)
db_version_calls = [mock.call('engine', 'repo')] * 2
self.assertEqual(db_version_calls, mock_db_version.call_args_list)
engine_calls = [mock.call(database)] * 3
self.assertEqual(engine_calls, mock_get_engine.call_args_list)
@mock.patch.object(migration, '_find_migrate_repo', return_value='repo')
@mock.patch.object(migration, 'get_engine', return_value='engine')
@mock.patch.object(versioning_api, 'version_control')
class TestDbVersionControl(test.NoDBTestCase):
def test_version_control(self, mock_version_control, mock_get_engine,
mock_find_repo):
database = 'fake'
migration.db_version_control(database=database)
mock_find_repo.assert_called_once_with(database)
mock_version_control.assert_called_once_with('engine', 'repo', None)
class TestGetEngine(test.NoDBTestCase):
def test_get_main_engine(self):
with mock.patch.object(db_api, 'get_engine',
return_value='engine') as mock_get_engine:
engine = migration.get_engine()
self.assertEqual('engine', engine)
mock_get_engine.assert_called_once_with()
def test_get_api_engine(self):
with mock.patch.object(db_api, 'get_api_engine',
return_value='api_engine') as mock_get_engine:
engine = migration.get_engine('api')
self.assertEqual('api_engine', engine)
mock_get_engine.assert_called_once_with()
class TestFlavorCheck(test.TestCase):
def setUp(self):
super(TestFlavorCheck, self).setUp()
self.context = context.get_admin_context()
self.migration = importlib.import_module(
'nova.db.sqlalchemy.migrate_repo.versions.'
'291_enforce_flavors_migrated')
self.engine = db_api.get_engine()
def test_upgrade_clean(self):
inst = objects.Instance(context=self.context,
uuid=uuid.uuid4(),
user_id=self.context.user_id,
project_id=self.context.project_id,
system_metadata={'foo': 'bar'})
inst.create()
self.migration.upgrade(self.engine)
def test_upgrade_dirty(self):
inst = objects.Instance(context=self.context,
uuid=uuid.uuid4(),
user_id=self.context.user_id,
project_id=self.context.project_id,
system_metadata={'foo': 'bar',
'instance_type_id': 'foo'})
inst.create()
self.assertRaises(exception.ValidationError,
self.migration.upgrade, self.engine)
def test_upgrade_flavor_deleted_instances(self):
inst = objects.Instance(context=self.context,
uuid=uuid.uuid4(),
user_id=self.context.user_id,
project_id=self.context.project_id,
system_metadata={'foo': 'bar',
'instance_type_id': 'foo'})
inst.create()
inst.destroy()
self.migration.upgrade(self.engine)
def test_upgrade_flavor_deleted_sysmeta(self):
flavor = flavors.get_default_flavor()
sysmeta = flavors.save_flavor_info({}, flavor)
sysmeta['foo'] = 'bar'
inst = objects.Instance(context=self.context,
uuid=uuid.uuid4(),
user_id=self.context.user_id,
project_id=self.context.project_id,
system_metadata=sysmeta)
inst.create()
sysmeta = db_api.instance_system_metadata_get(self.context,
inst.uuid)
sysmeta = {k: v for k, v in sysmeta.items()
if not k.startswith('instance_type_')}
db_api.instance_system_metadata_update(self.context, inst.uuid,
sysmeta, True)
inst.refresh()
self.assertEqual({'foo': 'bar'}, inst.system_metadata)
self.migration.upgrade(self.engine)
def test_upgrade_flavor_already_migrated(self):
flavor = flavors.get_default_flavor()
sysmeta = flavors.save_flavor_info({}, flavor)
sysmeta['foo'] = 'bar'
inst = objects.Instance(context=self.context,
uuid=uuid.uuid4(),
user_id=self.context.user_id,
project_id=self.context.project_id,
system_metadata=sysmeta)
inst.create()
# Trigger the migration by lazy-loading flavor
inst.flavor
inst.save()
self.assertNotIn('instance_type_id', inst.system_metadata)
sysmeta = db_api.instance_system_metadata_get(self.context,
inst.uuid)
self.assertEqual({'foo': 'bar'}, sysmeta)
self.migration.upgrade(self.engine)
|
lichengshuang/createvhost | refs/heads/master | python/asher/idrac/bin/idrac.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import sys
import os
import IPy
import pexpect
from multiprocessing import Pool
import ConfigParser
#version 1.3
# Fixed returned service-tag (st) entries being reported as WRONG
# Fixed the AssertionError returned from main(bios)
# Handled the case where the return value is NONE
# NONE means nothing was returned
## 20170921 Added log processing
## 20171010 Added BIOS and iDRAC firmware upgrade support
## 20171011 Added scheduled-job creation for BIOS upgrades
## 20171106 Changed getbiosupdates to also fetch the service tag and model while collecting info
## 20171219 Changed the separator of processed RAID and iDRAC log results to ';'
## 20180423 Added fetching of the CPU configuration
def operatestg(ip,lines):
#只返回stg号和设备型号
for a in lines:
st1=re.match('Service Tag.*',a)
model=re.match('System Model.*\=\s(.*)',a)
if st1:
st=re.search('[0-9A-Z]{7}',st1.group()).group()
if model:
mode=model.groups()[0].strip()
if st:
nstrs = '%s,%s,%s' % (ip,mode,st)
return nstrs
###Check cpu config
def getCpuInfo(ip,lines):
    # Returns only whether CPU turbo boost and maximum-performance mode are enabled
results = []
results.append(ip)
for a in lines:
SysProfile=re.match('#?SysProfile\=(.*)',a)
ProcPwrPerf=re.match('#?ProcPwrPerf\=(.*)',a)
ProcTurboMode=re.match('#?ProcTurboMode\=(.*)',a)
if SysProfile:
results.append(SysProfile.groups()[0].strip())
if ProcPwrPerf:
results.append(ProcPwrPerf.groups()[0].strip())
if ProcTurboMode:
results.append(ProcTurboMode.groups()[0].strip())
if len(results) > 1:
raidstr = ','.join(results)
return raidstr
###Check temp config
### Temperature
def getTempInfo(ip,lines):
    # Returns only the inlet and exhaust temperatures
results = []
results.append(ip)
for a in lines:
SystemBoardIn=re.match('System Board Inlet Temp(.*)',a)
SystemBoardOut=re.match('System Board Exhaust Temp(.*)',a)
if SystemBoardIn:
#results.append(SystemBoardIn.groups()[0].strip())
temp1=SystemBoardIn.groups()[0].strip().split()[1].replace('C','')
results.append(temp1)
if SystemBoardOut:
temp2=SystemBoardOut.groups()[0].strip().split()[1].replace('C','')
results.append(temp2)
if len(results) > 1:
raidstr = ','.join(results)
return raidstr
#getbiosJobView
#getbiosJobView
##用于处理确定bios升级状态。进入待重启状态。
#results1 = []
results = []
results.append('%s' % ip)
for a in lines:
jobname=re.match('Job Name\=(.*)',a)
jobstatus=re.match('Status\=(.*)',a)
jobtime=re.match('Start Time\=(.*)',a)
jobinfo=re.match('Message\=(.*)',a)
if jobname:
results.append(jobname.groups()[0].strip())
if jobstatus:
results.append(jobstatus.groups()[0].strip())
if jobtime:
results.append(jobtime.groups()[0].strip())
if jobinfo:
infors = jobinfo.groups()[0].strip()
if 'scheduled' in infors:
results.append('scheduled')
elif 'completed' in infors:
results.append('completed')
else:
results.append(infors)
results.append('|')
if len(results) > 1:
raidstr = ','.join(results)
results1 = []
newdstr = raidstr.strip('|').strip(',').split('|')
for ni in range(len(newdstr)):
if 'BIOS' in newdstr[ni]:
tmps = newdstr[ni].strip(',')
return tmps
else:
                results1.append(newdstr[ni])
if len(results1) > 1:
tmpstr = ','.join(results1)
return tmpstr
def operatemacs(ip,lines):
    ## Handles the MAC addresses and the service tag, so it can be reused in other combinations.
for a in lines:
#匹配支持630和430
#print a
bmc=re.match('Current IP Address =.*',a)
#print "bmc is",bmc
br3=re.match(r'NIC.Integrated.1-4-1.*|NIC.Embedded.4-1-1.*',a)
#print "br3 is",br3
br2=re.match(r'NIC.Integrated.1-3-1.*|NIC.Embedded.3-1-1.*',a)
#print "br2 is",br2
br1=re.match('NIC.Integrated.1-2-1.*|NIC.Embedded.2-1-1.*',a)
#print "br1 is",br1
br0=re.match(r'NIC.Integrated.1-1-1.*|NIC.Embedded.1-1-1.*',a)
#print "br0 is",br0
st1=re.match('Service Tag.*',a)
model=re.match('System Model.*\=\s(.*)',a)
biosversion=re.match('System BIOS Version.*\=\s(.*)',a)
dracversion=re.match('Firmware Version.*\=\s(.*)',a)
#print "st is",st1
if br0:
b0=re.search('..:..:..:..:..:..',br0.group()).group()
if br1:
b1=re.search('..:..:..:..:..:..',br1.group()).group()
if br2:
b2=re.search('..:..:..:..:..:..',br2.group()).group()
if br3:
b3=re.search('..:..:..:..:..:..',br3.group()).group()
if st1:
st=re.search('[0-9A-Z]{7}',st1.group()).group()
if model:
mode=model.groups()[0].strip()
if biosversion:
biosv=biosversion.groups()[0].strip()
if dracversion:
dracv=dracversion.groups()[0].strip()
try:
if st:
nstrs = '%s,%s,%s,%s,%s' % (ip,mode,st,biosv,dracv)
if b0:
strs = '%s,%s,%s,%s,%s,%s|%s|%s|%s' % (ip,mode,st,biosv,dracv,b0,b1,b2,b3)
print strs
return strs
else:
strs = ip + ",WRONG," + "NOTDATA"
print strs
return strs
except UnboundLocalError:
strs = ip + ",WRONG," + "OTHERSERROR"
if st:
print nstrs
return nstrs
else:
return strs
except:
strs = ip + ",WRONG," + "OTHERSERROR"
if st:
print nstrs
return nstrs
else:
return strs
def operateraid(ip,lines):
    # Collects disk information
results = []
results.append('%s,' % ip)
for std in lines:
size=re.match('\s+Size.*\=\s(.*)',std)
disk=re.match('Disk.Bay.(\d+):.*',std)
Pro=re.match('\s+Manufacturer.*\=\s(.*)',std)
serialnum=re.match('\s+SerialNumber\s+=\s(.*)',std)
if size:
sizes = size.groups()[0].strip()
results.append(sizes)
if disk:
results.append(disk.groups()[0].strip())
if Pro:
results.append(Pro.groups()[0].strip())
if serialnum:
results.append(serialnum.groups()[0].strip())
if len(results) > 1:
raidstr = '|'.join(results)
#print raidstr
return raidstr
else:
results.append('NODISKDATA')
raidstr = '|'.join(results)
return raidstr
def operateraidstatus(ip,lines):
    # Collects disk log information
results = []
results.append('%s,' % ip)
for std in lines:
size=re.match('\s+Size.*\=\s(.*)',std)
disk=re.match('Disk.Bay.(\d+):.*',std)
status=re.match('\s+Status\s+\=\s(.*)',std)
Pro=re.match('\s+Manufacturer.*\=\s(.*)',std)
serialnum=re.match('\s+SerialNumber\s+=\s(.*)',std)
mediatype=re.match('\s+MediaType\s+=\s(.*)',std)
if size:
sizes = size.groups()[0].strip()
results.append(sizes)
if disk:
results.append(disk.groups()[0].strip())
if status:
results.append(status.groups()[0].strip())
if Pro:
results.append(Pro.groups()[0].strip())
if mediatype:
results.append(mediatype.groups()[0].strip())
if serialnum:
results.append(serialnum.groups()[0].strip())
results.append(',')
if len(results) > 1:
raidstr = '|'.join(results)
results1 = []
results1.append('%s' % ip)
newdstr = raidstr.strip(',').strip('|').split(',')
disknu = len(newdstr) - 1
results1.append('DiskNU:%s' % disknu)
if ( 'Warning' in raidstr ) or 'Unknown' in raidstr or 'Failed' in raidstr:
for sei in range(len(newdstr)):
#print newdstr[sei]
tempstrs = str(newdstr[sei].strip('|'))
if 'Warning' in tempstrs or 'Unknown' in tempstrs or 'Failed' in tempstrs:
results1.append('%s' % tempstrs)
else:
results1.append('RAIDLOG:OK')
if len(results1) > 1:
#results1.append('OK')
raidstr1 = ','.join(results1)
# print raidstr1
return raidstr1
else:
results.append('NODISKDATA')
raidstr = '|'.join(results)
return raidstr
def operatesel(ip,lines):
    # Processes the SEL log
results = []
results.append('%s' % ip)
for std in lines:
sdate = re.match('Date/Time:\s+(.*)',std)
severity = re.match('Severity:\s+(.*)',std)
problem = re.match('Description:\s(.*)',std)
if severity:
serity= severity.groups()[0].strip()
results.append(serity)
if sdate:
results.append(sdate.groups()[0].strip())
if problem:
results.append(problem.groups()[0].strip())
results.append(',')
if len(results) > 1:
raidstr = '|'.join(results)
# print raidstr
results1 = []
results1.append('%s' % ip)
newdstr = raidstr.strip(',').strip('|').split(',')
num = 0
if 'Critical' in raidstr:
for sei in range(len(newdstr)):
if 'Critical' in newdstr[sei]:
if 'Non-Critical' in newdstr[sei]:
pass
else:
if num < 2:
# print newdstr[sei]
results1.append(newdstr[sei].strip('|'))
num += 1
else:
break
else:
# print 'ok'
results1.append('IDRACLOG:OK')
if len(results1) > 1:
#results1.append('OK')
raidstr1 = ','.join(results1)
# print raidstr1
return raidstr1
else:
results.append('NOINFO')
raidstr = ','.join(results)
return raidstr
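# Hedged example of the string operatesel returns (hypothetical values):
#   '10.0.0.1,IDRACLOG:OK'                                 when no critical entries exist
#   '10.0.0.1,Critical|Thu Sep 21 2017 10:00:00|Fan fail'  when critical entries are found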
def ssh_cmd(user, passwd, ip, port, cmd):
newlist = []
    # This list holds the combined output of several different commands: previously each
    # data-collecting command was run once and its result processed and returned on its own;
    # now a batch of commands is passed in, processed, and returned in a single pass.
#print len(cmd)
for cmdi in range(len(cmd)):
# print cmdi
cmdm = cmd[cmdi]
print ip + "\t" + cmdm
ssh = pexpect.spawn('ssh -p%s %s@%s "%s"' % (port, user, ip, cmdm))
try:
i = ssh.expect(['password:', 'continue connecting (yes/no)?'], timeout=20)
if i == 0 :
ssh.sendline(passwd)
elif i == 1:
ssh.sendline('yes\n')
ssh.expect('password: ')
ssh.sendline(passwd)
                ssh.sendline(cmdm)
except pexpect.EOF:
if key == 'getmacs' or key == 'raid' or key == 'getsel' :
strs = ip + ",WRONG," + "EOF"
print strs
return strs
ssh.close()
except pexpect.TIMEOUT:
if key == 'getmacs' or key == 'raid' or key == 'getsel' :
strs = ip + ",WRONG," + "TIMEOUT"
print strs
return strs
ssh.close()
except:
if key == 'getmacs' or key == 'raid' or key == 'getsel' :
strs = ip + ",WRONG," + "NONE_PASSWORDWONG"
print strs
return strs
ssh.close()
else:
if key == 'getmacs':
lines=ssh.readlines()
return operatemacs(ip,lines)
elif key == 'raid':
lines=ssh.readlines()
return operateraid(ip,lines)
elif key == 'getsel':
lines=ssh.readlines()
return operatesel(ip,lines)
#elif key == 'getcpu':
# lines=ssh.readlines()
# return getCpuInfo(ip,lines)
#def getTempInfo(ip,lines):
            ### 20180420 Handle the temperature data
elif key == 'getTemp':
lines=ssh.readlines()
if cmdi == 0:
stgtmp = operatestg(ip,lines).split(',')
if len(stgtmp) > 2:
newlist.extend(stgtmp)
ssh.close()
continue
if cmdi == 1:
#return getCpuInfo(ip,lines)
raidtmp=getTempInfo(ip,lines).split(',')
#raidtmp=getCpuInfo(ip,lines)
if len(raidtmp) > 1:
#newlist.append('%s' % raidtmp[1])
newlist.extend(raidtmp[1:])
ssh.close()
continue
            ### 20180420 Handle the CPU configuration
elif key == 'getcpu':
lines=ssh.readlines()
if cmdi == 0:
stgtmp = operatestg(ip,lines).split(',')
if len(stgtmp) > 2:
newlist.extend(stgtmp)
ssh.close()
continue
if cmdi == 1:
#return getCpuInfo(ip,lines)
raidtmp=getCpuInfo(ip,lines).split(',')
#raidtmp=getCpuInfo(ip,lines)
if len(raidtmp) > 1:
#newlist.append('%s' % raidtmp[1])
newlist.extend(raidtmp[1:])
ssh.close()
continue
            ## getbiosJobView output previously lacked the service tag and model, so adjust for that here
elif key == 'getbiosJobView':
lines=ssh.readlines()
if cmdi < 1:
stgtmp = operatestg(ip,lines).split(',')
if len(stgtmp) > 2:
newlist.extend(stgtmp)
ssh.close()
continue
if cmdi < 2:
raidtmp = getbiosupdates(ip,lines).split(',')
if len(raidtmp) > 1:
#newlist.append('%s' % raidtmp[1])
newlist.extend(raidtmp[1:])
ssh.close()
continue
elif key == 'allreport':
lines=ssh.readlines()
if cmdi < 1:
stgtmp = operatestg(ip,lines).split(',')
if len(stgtmp) > 2:
newlist.extend(stgtmp)
ssh.close()
continue
if cmdi < 2:
#operateraidstatus
raidtmp = operateraidstatus(ip,lines).split(',')
# print raidtmp
if len(raidtmp) > 1:
#newlist.append('%s' % raidtmp[1])
#newlist.extend(raidtmp[1:])
                        ## Join the RAID info with ';' instead of ',' so the RAID and iDRAC
                        ## logs can later be extracted at fixed positions
newraidinfo = ';'.join(raidtmp[1:])
newlist.append('%s' % newraidinfo)
ssh.close()
continue
if cmdi < 3:
raidtmp = operatesel(ip,lines).split(',')
# print raidtmp
if len(raidtmp) > 1:
newraidinfo = ';'.join(raidtmp[1:])
# newlist.extend(newraidinfo)
#newlist.append('%s' % raidtmp[1])
newlist.append('%s' % newraidinfo)
ssh.close()
continue
else:
for a in ssh.readlines():
jieguo = re.match('.*successful',a)
if jieguo:
print ip + "\t" + cmdm + "\tok"
ssh.close()
# print newlist
if len(newlist) > 2:
        tmpstr = ','.join(newlist)
# print tmpstr
return tmpstr
def getnfsip(ip,nfsservers):
"""
通过ip地址获取所在机房的nfsserver的ip地址
此函数也仅用于在固件升级的时候去使用
"""
ip1 = ip
#ipss是 所有nfsserver的ip信息列表,下面会先算他的网段,然后在让进来的ip地址取匹配他属于哪个nfsserver
#假如匹配不上那就回复一个默认nfsserver地址,可能不一定好用
ipss = nfsservers.split()
skd = ''
for i in range(len(ipss)):
nfs=ipss[i].split('/')[0]
IPD=IPy.IP('%s' % ipss[i] , make_net=True)
if ip1 in IPy.IP('%s' % IPD):
# print ip1 + ' is in ' + nfs
            skd = nfs
return nfs
# break
if skd == '':
        # If no match was found, return the default NFS server IP; it may not be usable.
return nfsserver
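# Hedged usage sketch (hypothetical addresses): with nfsservers = '10.0.0.5/24 10.1.0.5/24',
# getnfsip('10.1.0.77', nfsservers) returns '10.1.0.5', while an address outside both
# networks falls back to the module-wide default nfsserver.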
def main(keys):
"""
    1. This is the main entry point for the concurrent SSH calls.
    macs is a dict holding the results returned by the concurrent SSH runs; currently
    only the getmacs command returns data this way, the other commands are simply
    executed over SSH with no return value.
    2. key is the command name; the concrete command lines for each key are defined
    in the cmds dict below.
"""
global macs,key
global fileName,binPath,basePath,confPath,writelog,alldracip,rundracip,sshport,sshuser,sshpassword,newpasswd,nfsserver
    ### Resolve absolute paths
fileName = os.path.abspath(__file__)
binPath = os.path.dirname(os.path.realpath(__file__))
basePath = os.path.dirname(binPath)
confPath = basePath + '/config/'
    ### Read the iDRAC password etc., plus where the IP address files live
conf = ConfigParser.ConfigParser()
#if os.path.isfile(local_file):
#return True
conf.read("%s/config.ini" % confPath)
FilesPath = conf.get('globle','filepath')
if not os.path.isdir(FilesPath):
os.mkdir(FilesPath)
writelog = FilesPath + conf.get("globle", "writeniclog")
reportlog = FilesPath + conf.get("globle", "reportlog")
raidlog = FilesPath + conf.get("globle", "raidlog")
alldracip = confPath + conf.get("globle", "alldracip")
rundracip = confPath + conf.get("globle", "runracip")
process = int(conf.get("globle", "processes"))
newpasswd = conf.get("globle", "newpasswd")
ntpserver = conf.get("globle", "ntpserver")
timezone = conf.get("globle", "timezone")
nfsserver = conf.get("globle", "nfsserver")
nfsservers = conf.get("globle", "nfsservers")
pathurl = conf.get("globle", "pathurl")
biospkgs = conf.get("globle", "biospkgs")
idracpkgs = conf.get("globle", "idracpkgs")
sshport = conf.get("globle", "port")
sshuser = conf.get("dell", "user")
sshpassword = conf.get("dell", "password")
macs = {}
if keys == 'get_all_macs':
key = 'getmacs'
writeniclog = writelog
filedracip = alldracip
elif keys == 'raid':
key = 'raid'
filedracip = alldracip
writeniclog = raidlog
elif keys == 'power_all_off':
key = 'poweroff'
filedracip = alldracip
else:
key = keys
filedracip = rundracip
writeniclog = writelog
user = sshuser
password = sshpassword
port = sshport
cmds = {
'getmacs':[
'racadm getsysinfo'
],
'getcpu':[
'racadm getsysinfo',
'racadm get BIOS.SysProfileSettings.',
],
'clear':[
'racadm jobqueue delete --all',
],
'reset':[
'racadm racreset',
],
'pxe':[
'racadm jobqueue delete --all',
'racadm config -g cfgServerInfo -o cfgServerFirstBootDevice pxe',
'racadm config -g cfgServerInfo -o cfgServerBootOnce 1',
'racadm serveraction powercycle',
],
'reboot':[
'racadm serveraction powercycle'
],
'poweroff':[
'racadm serveraction powerdown'
],
'poweron':[
'racadm serveraction powerup'
],
'getTemp':[
'racadm getsysinfo',
'racadm getsensorinfo'
],
'biosturbo':[
'racadm jobqueue delete --all',
'racadm set bios.procsettings.controlledturbo Enabled',
'racadm jobqueue create BIOS.Setup.1-1',
'racadm serveraction powercycle'
],
'biosdiscpu':[
'racadm jobqueue delete --all',
'racadm set BIOS.ProcSettings.LogicalProc Disabled',
'racadm jobqueue create BIOS.Setup.1-1',
'racadm serveraction powercycle'
],
'bios':[
'racadm jobqueue delete --all',
'racadm set BIOS.MiscSettings.ErrPrompt Disabled',
'racadm set BIOS.SysProfileSettings.SysProfile PerfOptimized ',
            'racadm set BIOS.BiosBootSettings.BootSeq HardDisk.List.1-1,NIC.Integrated.1-1-1',
            'racadm jobqueue create BIOS.Setup.1-1',
'racadm set NIC.nicconfig.1.LegacyBootProto PXE',
'racadm set NIC.nicconfig.3.LegacyBootProto NONE',
'racadm jobqueue create NIC.Integrated.1-1-1',
'racadm jobqueue create NIC.Integrated.1-3-1',
'racadm serveraction powercycle',
'racadm set iDRAC.IPMILan.1.Enable Enabled',
'racadm set BIOS.MiscSettings.ErrPrompt Disabled'
],
'zulong':[
'racadm jobqueue delete --all',
'racadm set NIC.nicconfig.3.LegacyBootProto pxE',
'racadm jobqueue create NIC.Integrated.1-3-1'
],
'allreport':[
'racadm getsysinfo',
'racadm raid get pdisks -o',
'racadm getsel'
],
'raid':[
'racadm raid get pdisks -o'
],
        ## Change the iDRAC password
'changepasswd':[
'racadm config -g cfgUserAdmin -o cfgUserAdminPassword -i 2 %s' % newpasswd
],
# getsel
'getsel':[
'racadm getsel'
],
'getbiosJobView':[
'racadm getsysinfo',
'racadm jobqueue view'
],
# getsel
'clearlog':[
'racadm clrsel'
],
        ## Set the NTP server
'setntp':[
'racadm set idrac.NTPConfigGroup.NTP1 %s' % ntpserver ,
'racadm set idrac.NTPConfigGroup.NTPEnable Enabled',
'racadm set idrac.time.Timezone %s' % timezone
],
'ipmi':[
'racadm set iDRAC.IPMILan.1.Enable Enabled',
'racadm set idrac.time.Timezone %s' % timezone
]
}
pool = Pool(processes=process)
dracips = []
result = []
with open(filedracip,'r') as f:
for ii in f.readlines():
abc = ii.split()
dracips.append(abc[0])
for i in range(len(dracips)):
ip = dracips[i]
if key == 'idracUpdate' or key == 'biosUpdate':
            ## Firmware upgrades are served from each site's management-host NFS server;
            ## the line below computes the provisioning NFS server IP for every address.
nfsip = getnfsip(ip,nfsservers)
cmds = {
'biosUpdate':[
'racadm jobqueue delete --all',
#'racadm update -f %s -e %s -a TRUE -t HTTP' % (biospkgs, pathurl),
'racadm update -f %s -l %s:%s ' % (biospkgs, nfsip, pathurl)
#'racadm serveraction powercycle'
],
                ## Upgrade iDRAC
'idracUpdate':[
'racadm jobqueue delete --all',
#'racadm update -f %s -e %s -a TRUE -t HTTP' % (idracpkgs, pathurl),
#'racadm update -f %s -l %s:%s ' % (idracpkgs, nfsserver, pathurl),
'racadm update -f %s -l %s:%s ' % (idracpkgs, nfsip, pathurl),
#'racadm serveraction powercycle'
]
}
try:
get_ssh_result = pool.apply_async(ssh_cmd,(user, password, ip, port, cmds[key],))
        except Exception as w:
print w,ip
else:
result.append(get_ssh_result)
pool.close()
pool.join()
files = []
if key == 'getmacs' or key == 'raid' or key == 'getsel' or key == 'allreport' or key == 'getbiosJobView' or key == 'getcpu' or key == 'getTemp':
#if key == 'getmacs' or key == 'raid' or key == 'getsel' or key == 'allreport':
for res in result:
#print "res is"
#print res
try:
if key == 'idracUpdate' or key == 'biosUpdate':
i2 = res.get(timeout=120,)
else:
i2 = res.get(timeout=60,)
except:
i2 = "something wrong"
# print i2
else:
#i2 = str(i2)
#if not re.search(r'wrong|None',i2,re.I):
files.append(i2)
#if key == 'getmacs' or key == 'raid':
if key == 'getmacs' or key == 'raid' or key == 'getsel' or key == 'allreport' or key == 'getbiosJobView' or key == 'getcpu' or key == 'getTemp':
#if key == 'getmacs' or key == 'raid' or key == 'getsel' or key == 'allreport' or key == 'getbiosJobView' or key == 'getcpu' :
#if key == 'getmacs' or key == 'raid' or key == 'getsel' or key == 'allreport' or key == 'getbiosJobView':
#if key == 'getmacs' or key == 'raid' or key == 'getsel':
with open(writeniclog,'w') as ff:
for filel in range(len(files)):
if files[filel]:
ff.write("%s\n" % (files[filel]))
if key == 'allreport':
        # When collecting alert info, filter out the healthy hosts
#if key == 'getmacs' or key == 'raid' or key == 'getsel':
with open(reportlog,'w') as ff:
for filel in range(len(files)):
if files[filel]:
if ('RAIDLOG:OK' in files[filel] or 'NODISKDATA' in files[filel] ) and 'IDRACLOG:OK' in files[filel]:
pass
else:
ff.write("%s\n" % (files[filel]))
if __name__ == '__main__':
#global key
    ### Defaults to collecting server information
# keys = 'get_all_macs'
#keys = 'allreport'
#keys = 'clear'
keys = 'getTemp'
#keys = 'getbiosJobView'
#keys = 'reset'
#keys = 'biosUpdate'
#keys = 'idracUpdate'
#keys = 'getsel'
#keys = 'raid'
main(keys)
#main(keys1)
|
mttr/django | refs/heads/master | tests/test_client/auth_backends.py | 315 | from django.contrib.auth.backends import ModelBackend
class TestClientBackend(ModelBackend):
pass
|
unioslo/cerebrum | refs/heads/master | Cerebrum/modules/no/hiof/mod_sap_codes.py | 1 | # -*- coding: utf-8 -*-
#
# Copyright 2007-2019 University of Oslo, Norway
#
# This file is part of Cerebrum.
#
# Cerebrum is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Cerebrum is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
This file provides code values to be used with HiØf's SAP extension to
Cerebrum -- mod_sap.
This file is meant for any extra codes needed by the institution, or other
descriptions of the same codes for whatever reason. Make sure to link to this
file after the general codes in cereconf.CLASS_CONSTANTS.
"""
from __future__ import unicode_literals
from Cerebrum import Constants
from Cerebrum.modules.no.Constants import SAPLonnsTittelKode
class SAPConstants(Constants.Constants):
"""
This class embodies all constants that we need to address HR-related
tables in HiØ.
"""
# ----[ SAPLonnsTittelKode ]----------------------------------
sap_1007_hogskolelaerer_ovings = SAPLonnsTittelKode(
"20001007",
"1007 Høgskolelærer/øvings",
"VIT"
)
sap_1012_hogskoledosent = SAPLonnsTittelKode(
"20001012",
"1012 Høgskoledosent",
"ØVR"
)
sap_1019_vitenskapelig_assist = SAPLonnsTittelKode(
"20001019",
"1019 Vitenskapelig assist",
"VIT"
)
sap_1020_vitenskapelig_assist = SAPLonnsTittelKode(
"20001020",
"1020 Vitenskapelig assist",
"VIT"
)
sap_1203_fagarbeider_m_fagbre = SAPLonnsTittelKode(
"20001203",
"1203 Fagarbeider med fagbrev",
"ØVR"
)
sap_0000_ekstern_stillingskode = SAPLonnsTittelKode(
"00000000",
"0000 Ekstern tilsatt, ikke lønnet av HiØf",
"ØVR"
)
|
minsoopark/starbucks-py | refs/heads/master | setup.py | 1 | from setuptools import setup, find_packages
setup(
name='Starbucks',
packages=find_packages(),
version='0.5.3',
description='Unoffical Starbucks API.',
long_description=open('README.rst').read(),
license='BSD License',
author='Minsoo Park',
author_email='minsoo1003@gmail.com',
url='https://github.com/minsoopark/starbucks-py',
keywords=['Starbucks'],
install_requires=[
'click >= 3.3', 'requests >= 2.5.3', 'lxml >= 3.0.0'
],
entry_points='''
[console_scripts]
starbucks-card = starbucks.cli:card_info
starbucks-star = starbucks.cli:star_info
'''
)
|
Frodox/buildbot | refs/heads/master | master/buildbot/test/fake/libvirt.py | 10 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
class Domain(object):
def __init__(self, name, conn):
self.conn = conn
self._name = name
self.running = False
def name(self):
return self._name
def create(self):
self.running = True
def shutdown(self):
self.running = False
def destroy(self):
self.running = False
del self.conn[self._name]
class Connection(object):
def __init__(self, uri):
self.uri = uri
self.domains = {}
def createXML(self, xml, flags):
# FIXME: This should really parse the name out of the xml, i guess
d = self.fake_add("instance")
d.running = True
return d
def listDomainsID(self):
return list(self.domains)
def lookupByName(self, name):
return self.domains[name]
def lookupByID(self, ID):
return self.domains[ID]
def fake_add(self, name):
d = Domain(name, self)
self.domains[name] = d
return d
def open(uri):
return Connection(uri)
|
lensacom/satyr | refs/heads/master | mentor/tests/test_framework.py | 1 | from __future__ import absolute_import, division, print_function
import pytest
from mentor.messages import PythonTask
from mentor.proxies.messages import (CommandInfo, ContainerInfo, Cpus, Disk,
DockerInfo, Mem, TaskID, TaskInfo)
from mentor.scheduler import QueueScheduler, Running
from mentor.utils import RemoteException
@pytest.fixture
def command():
task = TaskInfo(name='test-task',
id=TaskID(value='test-task-id'),
resources=[Cpus(0.1), Mem(64)],
command=CommandInfo(value='echo 100'))
return task
@pytest.fixture
def docker_command():
task = TaskInfo(name='test-docker-task',
id=TaskID(value='test-docker-task-id'),
resources=[Cpus(0.1), Mem(64)],
command=CommandInfo(value='echo 100'),
container=ContainerInfo(
type='DOCKER',
docker=DockerInfo(image='daskos/mentor')))
return task
@pytest.fixture
def docker_python():
task = PythonTask(id=TaskID(value='test-python-task-id'),
fn=sum, args=[range(5)],
name='test-python-task-name',
resources=[Cpus(0.1), Mem(64), Disk(0)])
return task
def test_command(mocker, command):
sched = QueueScheduler()
mocker.spy(sched, 'on_update')
with Running(sched, name='test-scheduler'):
sched.submit(command)
sched.wait() # block until all tasks finishes
calls = sched.on_update.call_args_list
assert len(calls) == 2
args, kwargs = calls[0]
assert args[1].task_id.value == 'test-task-id'
assert args[1].state == 'TASK_RUNNING'
args, kwargs = calls[1]
assert args[1].task_id.value == 'test-task-id'
assert args[1].state == 'TASK_FINISHED'
def test_docker_command(mocker, docker_command):
sched = QueueScheduler()
mocker.spy(sched, 'on_update')
with Running(sched, name='test-scheduler'):
sched.submit(docker_command)
sched.wait() # block until all tasks finishes
calls = sched.on_update.call_args_list
assert len(calls) == 2
args, kwargs = calls[0]
assert args[1].task_id.value == 'test-docker-task-id'
assert args[1].state == 'TASK_RUNNING'
args, kwargs = calls[1]
assert args[1].task_id.value == 'test-docker-task-id'
assert args[1].state == 'TASK_FINISHED'
def test_docker_python(mocker, docker_python):
sched = QueueScheduler()
mocker.spy(sched, 'on_update')
with Running(sched, name='test-scheduler'):
sched.submit(docker_python)
sched.wait() # block until all tasks finishes
calls = sched.on_update.call_args_list
assert len(calls) == 2
args, kwargs = calls[0]
assert args[1].task_id.value == 'test-python-task-id'
assert args[1].state == 'TASK_RUNNING'
args, kwargs = calls[1]
assert args[1].task_id.value == 'test-python-task-id'
assert args[1].state == 'TASK_FINISHED'
def test_docker_python_exception():
sched = QueueScheduler()
def error():
raise TypeError('Dummy exception on executor side!')
task = PythonTask(id=TaskID(value='test-python-task-id'),
fn=error, name='test-python-task-name',
resources=[Cpus(0.1), Mem(64), Disk(0)])
with Running(sched, name='test-scheduler'):
sched.submit(task)
sched.wait()
assert task.status.has_failed()
assert isinstance(task.status.exception, RemoteException)
assert isinstance(task.status.exception, TypeError)
def test_parallel_execution(mocker, docker_python):
sched = QueueScheduler()
mocker.spy(sched, 'on_update')
with Running(sched, name='test-scheduler'):
tasks = []
for i in range(3):
task = PythonTask(id=TaskID(value='test-python-task-{}'.format(i)),
fn=sum, args=[[1, 10, i]],
name='test-python-task-name',
resources=[Cpus(0.1), Mem(64), Disk(0)])
sched.submit(task)
tasks.append(task)
sched.wait() # block until all tasks finishes
assert [t.status.data for t in tasks] == [11, 12, 13]
def test_sequential_execution(mocker, docker_python):
sched = QueueScheduler()
mocker.spy(sched, 'on_update')
with Running(sched, name='test-scheduler'):
tasks = []
for i in range(3):
task = PythonTask(id=TaskID(value='test-python-task-{}'.format(i)),
fn=sum, args=[[1, 10, i]],
name='test-python-task-name',
resources=[Cpus(0.1), Mem(64), Disk(0)])
sched.submit(task)
tasks.append(task)
sched.wait()
assert task.status.data == 11 + i
def test_docker_python_result(mocker, docker_python):
sched = QueueScheduler()
with Running(sched, name='test-scheduler'):
sched.submit(docker_python)
sched.wait() # block until all tasks finishes
assert docker_python.status.data == 10
|
DESHRAJ/fjord | refs/heads/master | vendor/packages/chardet/chardet/hebrewprober.py | 2928 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Shy Shalom
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe, eDetecting
from .compat import wrap_ord
# This prober doesn't actually recognize a language or a charset.
# It is a helper prober for the use of the Hebrew model probers
### General ideas of the Hebrew charset recognition ###
#
# Four main charsets exist in Hebrew:
# "ISO-8859-8" - Visual Hebrew
# "windows-1255" - Logical Hebrew
# "ISO-8859-8-I" - Logical Hebrew
# "x-mac-hebrew" - ?? Logical Hebrew ??
#
# Both "ISO" charsets use a completely identical set of code points, whereas
# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
# these code points. windows-1255 defines additional characters in the range
# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
# x-mac-hebrew defines similar additional code points but with a different
# mapping.
#
# As far as an average Hebrew text with no diacritics is concerned, all four
# charsets are identical with respect to code points. Meaning that for the
# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
# (including final letters).
#
# The dominant difference between these charsets is their directionality.
# "Visual" directionality means that the text is ordered as if the renderer is
# not aware of a BIDI rendering algorithm. The renderer sees the text and
# draws it from left to right. The text itself when ordered naturally is read
# backwards. A buffer of Visual Hebrew generally looks like so:
# "[last word of first line spelled backwards] [whole line ordered backwards
# and spelled backwards] [first word of first line spelled backwards]
# [end of line] [last word of second line] ... etc' "
# adding punctuation marks, numbers and English text to visual text is
# naturally also "visual" and from left to right.
#
# "Logical" directionality means the text is ordered "naturally" according to
# the order it is read. It is the responsibility of the renderer to display
# the text from right to left. A BIDI algorithm is used to place general
# punctuation marks, numbers and English text in the text.
#
# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
# what little evidence I could find, it seems that its general directionality
# is Logical.
#
# To sum up all of the above, the Hebrew probing mechanism knows about two
# charsets:
# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
# backwards while line order is natural. For charset recognition purposes
# the line order is unimportant (In fact, for this implementation, even
# word order is unimportant).
# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
#
# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
# specifically identified.
# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
# that contain special punctuation marks or diacritics is displayed with
# some unconverted characters showing as question marks. This problem might
# be corrected using another model prober for x-mac-hebrew. Due to the fact
# that x-mac-hebrew texts are so rare, writing another model prober isn't
# worth the effort and performance hit.
#
#### The Prober ####
#
# The prober is divided between two SBCharSetProbers and a HebrewProber,
# all of which are managed, created, fed data, inquired and deleted by the
# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
# fact some kind of Hebrew, Logical or Visual. The final decision about which
# one is it is made by the HebrewProber by combining final-letter scores
# with the scores of the two SBCharSetProbers to produce a final answer.
#
# The SBCSGroupProber is responsible for stripping the original text of HTML
# tags, English characters, numbers, low-ASCII punctuation characters, spaces
# and new lines. It reduces any sequence of such characters to a single space.
# The buffer fed to each prober in the SBCS group prober is pure text in
# high-ASCII.
# The two SBCharSetProbers (model probers) share the same language model:
# Win1255Model.
# The first SBCharSetProber uses the model normally as any other
# SBCharSetProber does, to recognize windows-1255, upon which this model was
# built. The second SBCharSetProber is told to make the pair-of-letter
# lookup in the language model backwards. This in practice exactly simulates
# a visual Hebrew model using the windows-1255 logical Hebrew model.
#
# The HebrewProber is not using any language model. All it does is look for
# final-letter evidence suggesting the text is either logical Hebrew or visual
# Hebrew. Disjointed from the model probers, the results of the HebrewProber
# alone are meaningless. HebrewProber always returns 0.00 as confidence
# since it never identifies a charset by itself. Instead, the pointer to the
# HebrewProber is passed to the model probers as a helper "Name Prober".
# When the Group prober receives a positive identification from any prober,
# it asks for the name of the charset identified. If the prober queried is a
# Hebrew model prober, the model prober forwards the call to the
# HebrewProber to make the final decision. In the HebrewProber, the
# decision is made according to the final-letters scores maintained and Both
# model probers scores. The answer is returned in the form of the name of the
# charset identified, either "windows-1255" or "ISO-8859-8".
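# A minimal sketch of how the final decision combines these scores (see
# get_charset_name below; the thresholds are the constants defined next):
#   finalsub = logical_final_score - visual_final_score
#   finalsub >= MIN_FINAL_CHAR_DISTANCE   -> LOGICAL_HEBREW_NAME ("windows-1255")
#   finalsub <= -MIN_FINAL_CHAR_DISTANCE  -> VISUAL_HEBREW_NAME  ("ISO-8859-8")
#   otherwise the difference of the two model probers' confidences breaks the tie.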
# windows-1255 / ISO-8859-8 code points of interest
FINAL_KAF = 0xea
NORMAL_KAF = 0xeb
FINAL_MEM = 0xed
NORMAL_MEM = 0xee
FINAL_NUN = 0xef
NORMAL_NUN = 0xf0
FINAL_PE = 0xf3
NORMAL_PE = 0xf4
FINAL_TSADI = 0xf5
NORMAL_TSADI = 0xf6
# Minimum Visual vs Logical final letter score difference.
# If the difference is below this, don't rely solely on the final letter score
# distance.
MIN_FINAL_CHAR_DISTANCE = 5
# Minimum Visual vs Logical model score difference.
# If the difference is below this, don't rely at all on the model score
# distance.
MIN_MODEL_DISTANCE = 0.01
VISUAL_HEBREW_NAME = "ISO-8859-8"
LOGICAL_HEBREW_NAME = "windows-1255"
class HebrewProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mLogicalProber = None
self._mVisualProber = None
self.reset()
def reset(self):
self._mFinalCharLogicalScore = 0
self._mFinalCharVisualScore = 0
# The two last characters seen in the previous buffer,
# mPrev and mBeforePrev are initialized to space in order to simulate
# a word delimiter at the beginning of the data
self._mPrev = ' '
self._mBeforePrev = ' '
# These probers are owned by the group prober.
def set_model_probers(self, logicalProber, visualProber):
self._mLogicalProber = logicalProber
self._mVisualProber = visualProber
def is_final(self, c):
return wrap_ord(c) in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
FINAL_TSADI]
def is_non_final(self, c):
# The normal Tsadi is not a good Non-Final letter due to words like
# 'lechotet' (to chat) containing an apostrophe after the tsadi. This
# apostrophe is converted to a space in FilterWithoutEnglishLetters
# causing the Non-Final tsadi to appear at an end of a word even
# though this is not the case in the original text.
# The letters Pe and Kaf rarely display a related behavior of not being
# a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak'
# for example legally end with a Non-Final Pe or Kaf. However, the
# benefit of these letters as Non-Final letters outweighs the damage
# since these words are quite rare.
return wrap_ord(c) in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]
def feed(self, aBuf):
# Final letter analysis for logical-visual decision.
# Look for evidence that the received buffer is either logical Hebrew
# or visual Hebrew.
# The following cases are checked:
# 1) A word longer than 1 letter, ending with a final letter. This is
# an indication that the text is laid out "naturally" since the
# final letter really appears at the end. +1 for logical score.
# 2) A word longer than 1 letter, ending with a Non-Final letter. In
# normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
# should not end with the Non-Final form of that letter. Exceptions
# to this rule are mentioned above in isNonFinal(). This is an
# indication that the text is laid out backwards. +1 for visual
# score
# 3) A word longer than 1 letter, starting with a final letter. Final
# letters should not appear at the beginning of a word. This is an
# indication that the text is laid out backwards. +1 for visual
# score.
#
# The visual score and logical score are accumulated throughout the
# text and are finally checked against each other in GetCharSetName().
# No checking for final letters in the middle of words is done since
# that case is not an indication for either Logical or Visual text.
#
# We automatically filter out all 7-bit characters (replace them with
# spaces) so the word boundary detection works properly. [MAP]
if self.get_state() == eNotMe:
# Both model probers say it's not them. No reason to continue.
return eNotMe
aBuf = self.filter_high_bit_only(aBuf)
for cur in aBuf:
if cur == ' ':
# We stand on a space - a word just ended
if self._mBeforePrev != ' ':
# next-to-last char was not a space so self._mPrev is not a
# 1 letter word
if self.is_final(self._mPrev):
# case (1) [-2:not space][-1:final letter][cur:space]
self._mFinalCharLogicalScore += 1
elif self.is_non_final(self._mPrev):
# case (2) [-2:not space][-1:Non-Final letter][
# cur:space]
self._mFinalCharVisualScore += 1
else:
# Not standing on a space
if ((self._mBeforePrev == ' ') and
(self.is_final(self._mPrev)) and (cur != ' ')):
# case (3) [-2:space][-1:final letter][cur:not space]
self._mFinalCharVisualScore += 1
self._mBeforePrev = self._mPrev
self._mPrev = cur
# Forever detecting, till the end or until both model probers return
# eNotMe (handled above)
return eDetecting
def get_charset_name(self):
# Make the decision: is it Logical or Visual?
# If the final letter score distance is dominant enough, rely on it.
finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
if finalsub >= MIN_FINAL_CHAR_DISTANCE:
return LOGICAL_HEBREW_NAME
if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
return VISUAL_HEBREW_NAME
# It's not dominant enough, try to rely on the model scores instead.
modelsub = (self._mLogicalProber.get_confidence()
- self._mVisualProber.get_confidence())
if modelsub > MIN_MODEL_DISTANCE:
return LOGICAL_HEBREW_NAME
if modelsub < -MIN_MODEL_DISTANCE:
return VISUAL_HEBREW_NAME
# Still no good, back to final letter distance, maybe it'll save the
# day.
if finalsub < 0.0:
return VISUAL_HEBREW_NAME
# (finalsub > 0 - Logical) or (don't know what to do) default to
# Logical.
return LOGICAL_HEBREW_NAME
def get_state(self):
# Remain active as long as any of the model probers are active.
if (self._mLogicalProber.get_state() == eNotMe) and \
(self._mVisualProber.get_state() == eNotMe):
return eNotMe
return eDetecting
|
wanderine/nipype | refs/heads/master | nipype/interfaces/camino/odf.py | 1 | """
Change directory to provide relative paths for doctests
>>> import os
>>> filepath = os.path.dirname( os.path.realpath( __file__ ) )
>>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data'))
>>> os.chdir(datadir)
"""
import os
from nipype.interfaces.base import (CommandLineInputSpec, CommandLine, traits,
TraitedSpec, File, StdOutCommandLine,
StdOutCommandLineInputSpec, isdefined)
from nipype.utils.filemanip import split_filename
class QBallMXInputSpec(StdOutCommandLineInputSpec):
basistype = traits.Enum('rbf', 'sh', argstr='-basistype %s',
desc=('Basis function type. "rbf" to use radial basis functions '
'"sh" to use spherical harmonics'), usedefault=True)
scheme_file = File(exists=True, argstr='-schemefile %s', mandatory=True,
desc='Specifies the scheme file for the diffusion MRI data')
order = traits.Int(argstr='-order %d', units='NA',
desc=('Specific to sh. Maximum order of the spherical harmonic series. '
'Default is 4.'))
rbfpointset = traits.Int(argstr='-rbfpointset %d', units='NA',
desc=('Specific to rbf. Sets the number of radial basis functions to use. '
'The value specified must be present in the Pointsets directory. '
'The default value is 246.'))
rbfsigma = traits.Float(argstr='-rbfsigma %f', units='NA',
desc=('Specific to rbf. Sets the width of the interpolating basis functions. '
'The default value is 0.2618 (15 degrees).'))
smoothingsigma = traits.Float(argstr='-smoothingsigma %f', units='NA',
desc=('Specific to rbf. Sets the width of the smoothing basis functions. '
'The default value is 0.1309 (7.5 degrees).'))
class QBallMXOutputSpec(TraitedSpec):
qmat = File(exists=True, desc='Q-Ball reconstruction matrix')
class QBallMX(StdOutCommandLine):
"""
Generates a reconstruction matrix for Q-Ball. Used in LinRecon with
the same scheme file to reconstruct data.
Example 1
---------
To create a linear transform matrix using Spherical Harmonics (sh).
>>> import nipype.interfaces.camino as cam
>>> qballmx = cam.QBallMX()
>>> qballmx.inputs.scheme_file = 'A.scheme'
>>> qballmx.inputs.basistype = 'sh'
>>> qballmx.inputs.order = 6
>>> qballmx.run() # doctest: +SKIP
Example 2
---------
To create a linear transform matrix using Radial Basis Functions
(rbf). This command uses the default setting of rbf sigma = 0.2618
(15 degrees), data smoothing sigma = 0.1309 (7.5 degrees), rbf
pointset 246
>>> import nipype.interfaces.camino as cam
>>> qballmx = cam.QBallMX()
>>> qballmx.inputs.scheme_file = 'A.scheme'
>>> qballmx.run() # doctest: +SKIP
The linear transform matrix from any of these two examples can then
be run over each voxel using LinRecon
>>> qballcoeffs = cam.LinRecon()
>>> qballcoeffs.inputs.in_file = 'SubjectA.Bfloat'
>>> qballcoeffs.inputs.scheme_file = 'A.scheme'
>>> qballcoeffs.inputs.qball_mat = 'A_qmat.Bdouble'
>>> qballcoeffs.inputs.normalize = True
>>> qballcoeffs.inputs.bgmask = 'brain_mask.nii'
>>> qballcoeffs.run() # doctest: +SKIP
"""
_cmd = 'qballmx'
input_spec=QBallMXInputSpec
output_spec=QBallMXOutputSpec
def _list_outputs(self):
outputs = self.output_spec().get()
outputs['qmat'] = os.path.abspath(self._gen_outfilename())
return outputs
def _gen_outfilename(self):
_, name , _ = split_filename(self.inputs.scheme_file)
return name + '_qmat.Bdouble'
class LinReconInputSpec(StdOutCommandLineInputSpec):
in_file = File(exists=True, argstr='%s', mandatory=True, position=1,
desc='voxel-order data filename')
scheme_file = File(exists=True, argstr='%s', mandatory=True, position=2,
desc='Specifies the scheme file for the diffusion MRI data')
qball_mat = File(exists=True, argstr='%s', mandatory=True, position=3,
desc='Linear transformation matrix.')
normalize = traits.Bool(argstr='-normalize',
desc=('Normalize the measurements and discard '
'the zero measurements before the linear transform.'))
log = traits.Bool(argstr='-log',
desc=('Transform the log measurements rather than the '
'measurements themselves'))
bgmask = File(exists=True, argstr='-bgmask %s', desc='background mask')
class LinReconOutputSpec(TraitedSpec):
recon_data = File(exists=True, desc='Transformed data')
class LinRecon(StdOutCommandLine):
"""
Runs a linear transformation in each voxel.
Reads a linear transformation from the matrix file assuming the
imaging scheme specified in the scheme file. Performs the linear
transformation on the data in every voxel and outputs the result to
    the standard output. The output in every voxel is actually: ::
[exit code, ln(S(0)), p1, ..., pR]
where p1, ..., pR are the parameters of the reconstruction.
Possible exit codes are:
- 0. No problems.
- 6. Bad data replaced by substitution of zero.
The matrix must be R by N+M where N+M is the number of measurements
and R is the number of parameters of the reconstruction. The matrix
file contains binary double-precision floats. The matrix elements
are stored row by row.
Example
---------
First run QBallMX and create a linear transform matrix using
Spherical Harmonics (sh).
>>> import nipype.interfaces.camino as cam
>>> qballmx = cam.QBallMX()
>>> qballmx.inputs.scheme_file = 'A.scheme'
>>> qballmx.inputs.basistype = 'sh'
>>> qballmx.inputs.order = 4
>>> qballmx.run() # doctest: +SKIP
Then run it over each voxel using LinRecon
>>> qballcoeffs = cam.LinRecon()
>>> qballcoeffs.inputs.in_file = 'SubjectA.Bfloat'
>>> qballcoeffs.inputs.scheme_file = 'A.scheme'
>>> qballcoeffs.inputs.qball_mat = 'A_qmat.Bdouble'
>>> qballcoeffs.inputs.normalize = True
>>> qballcoeffs.run() # doctest: +SKIP
"""
_cmd = 'linrecon'
input_spec=LinReconInputSpec
output_spec=LinReconOutputSpec
def _list_outputs(self):
outputs = self.output_spec().get()
outputs['recon_data'] = os.path.abspath(self._gen_outfilename())
return outputs
def _gen_outfilename(self):
        _, name, _ = split_filename(self.inputs.scheme_file)
return name + '_recondata.Bdouble'
class MESDInputSpec(StdOutCommandLineInputSpec):
in_file = File(exists=True, argstr='-inputfile %s', mandatory=True, position=1,
desc='voxel-order data filename')
inverter = traits.Enum('SPIKE', 'PAS', argstr='-filter %s', position=2, mandatory=True,
desc=('The inversion index specifies the type of inversion to perform on the data.'
'The currently available choices are:'
'Inverter name | Inverter parameters'
'---------------|------------------'
'SPIKE | bd (b-value x diffusivity along the fibre.)'
'PAS | r'))
inverter_param = traits.Float(argstr='%f', units='NA', position=3, mandatory=True,
desc=('Parameter associated with the inverter. Cf. inverter description for'
'more information.'))
fastmesd = traits.Bool(argstr='-fastmesd', requires=['mepointset'],
desc=('Turns off numerical integration checks and fixes the integration point set size at that of'
                         'the index specified by -basepointset.'))
mepointset = traits.Int(argstr='-mepointset %d', units='NA',
desc=('Use a set of directions other than those in the scheme file for the deconvolution kernel.'
'The number refers to the number of directions on the unit sphere. For example, '
'"-mepointset 54" uses the directions in "camino/PointSets/Elec054.txt".'))
scheme_file = File(exists=True, argstr='-schemefile %s', mandatory=True,
desc='Specifies the scheme file for the diffusion MRI data')
bgmask = File(exists=True, argstr='-bgmask %s', desc='background mask')
inputdatatype = traits.Enum('float', 'char', 'short', 'int', 'long', 'double', argstr='-inputdatatype %s',
desc=('Specifies the data type of the input file: "char", "short", "int", "long",'
'"float" or "double". The input file must have BIG-ENDIAN ordering.'
'By default, the input type is "float".'))
class MESDOutputSpec(TraitedSpec):
mesd_data = File(exists=True, desc='MESD data')
class MESD(StdOutCommandLine):
"""
MESD is a general program for maximum entropy spherical deconvolution.
It also runs PASMRI, which is a special case of spherical deconvolution.
The input data must be in voxel order.
The format of the output in each voxel is:
{ exitcode, ln(A^star(0)), lambda_0, lambda_1, ..., lambda_N }
The exitcode contains the results of three tests. The first test thresholds
    the maximum relative error between the numerical integrals computed at
    convergence and those computed using a larger test point set; if the error is
greater than a threshold the exitcode is increased from zero to one as a
warning; if it is greater than a larger threshold the exitcode is increased to
two to suggest failure. The second test thresholds the predicted error in
numerical integrals computed using the test point set; if the predicted error
is greater than a threshold the exitcode is increased by 10. The third test
thresholds the RMS error between the measurements and their predictions from
the fitted deconvolution; if the errors are greater than a threshold, the exit
    code is increased by 100. An exitcode of 112 means that all three tests
    failed and the result is likely to be unreliable. If all is well the exitcode
    is zero. Results are often still reliable even if one or two of the tests
    fail.
Other possible exitcodes are:
- 5 - The optimization failed to converge
- -1 - Background
- -100 - Something wrong in the MRI data, e.g. negative or zero measurements,
so that the optimization could not run.
The standard MESD implementation is computationally demanding, particularly
as the number of measurements increases (computation is approximately O(N^2),
where N is the number of measurements). There are two ways to obtain significant
computational speed-up:
i) Turn off error checks and use a small point set for computing numerical
integrals in the algorithm by adding the flag -fastmesd. Sakaie CDMRI 2008
shows that using the smallest point set (-basepointset 0) with no
error checks usually has only a minor effect on the output of the algorithm,
but provides a major reduction in computation time. You can increase the point
set size using -basepointset with an argument higher than 0, which may produce
better results in some voxels, but will increase computation time, which
approximately doubles every time the point set index increases by 1.
ii) Reduce the complexity of the maximum entropy encoding using -mepointset <X>.
By default <X> = N, the number of measurements, and is the number of parameters
    in the max. ent. representation of the output function, i.e. the number of
    lambda parameters, as described in Jansons and Alexander, Inverse Problems 2003.
    However, we can represent the function using fewer components, and <X> here
    specifies the number of lambda parameters. To obtain speed-up, set <X>
    < N; complexity becomes O(<X>^2) rather than O(N^2). Note that <X> must be chosen
so that the camino/PointSets directory contains a point set with that number
of elements. When -mepointset decreases, the numerical integration checks
make less and less of a difference and smaller point sets for numerical
integration (see -basepointset) become adequate. So when <X> is low -fastmesd is
worth using to get even more speed-up.
The choice of <X> is a parameter of the technique. Too low and you lose angular
    resolution; too high and you see no computational benefit and may even suffer
    from overfitting. Empirically, we have found that <X>=16 often gives good
    results and good speed-up, but it is worth trying a few values and comparing
performance. The reduced encoding is described in the following ISMRM abstract:
Sweet and Alexander "Reduced Encoding Persistent Angular Structure" 572 ISMRM 2010.
Example
---------
Run MESD on every voxel of the data file SubjectA.Bfloat using the PASMRI kernel.
>>> import nipype.interfaces.camino as cam
>>> mesd = cam.MESD()
>>> mesd.inputs.in_file = 'SubjectA.Bfloat'
>>> mesd.inputs.scheme_file = 'A.scheme'
>>> mesd.inputs.inverter = 'PAS'
>>> mesd.inputs.inverter_param = 1.4
>>> mesd.run() # doctest: +SKIP
"""
_cmd = 'mesd'
input_spec=MESDInputSpec
output_spec=MESDOutputSpec
def _list_outputs(self):
outputs = self.output_spec().get()
outputs['mesd_data'] = os.path.abspath(self._gen_outfilename())
return outputs
def _gen_outfilename(self):
        _, name, _ = split_filename(self.inputs.scheme_file)
return name + '_MESD.Bdouble'
class SFPeaksInputSpec(StdOutCommandLineInputSpec):
in_file = File(exists=True, argstr='-inputfile %s', mandatory=True,
desc='Voxel-order data of spherical functions')
inputmodel = traits.Enum('sh', 'maxent', 'rbf', argstr='-inputmodel %s', mandatory=True,
desc=('Type of functions input via in_file. Currently supported options are: '
' sh - Spherical harmonic series. Specify the maximum order of the SH series '
' with the "order" attribute if different from the default of 4. '
' maxent - Maximum entropy representations output by MESD. The reconstruction '
' directions input to MESD must be specified. By default this is the '
' same set of gradient directions (excluding zero gradients) in the '
' scheme file, so specify the "schemefile" attribute unless the '
' "mepointset" attribute was set in MESD. '
' rbf - Sums of radial basis functions. Specify the pointset with the attribute '
' "rbfpointset" if different from the default. See QBallMX.'))
order = traits.Int(argstr='-order %d', units='NA',
desc='Specific to sh. Maximum order of the spherical harmonic series.')
scheme_file = File(exists=True, argstr='%s',
desc='Specific to maxent. Specifies the scheme file.')
rbfpointset = traits.Int(argstr='-rbfpointset %d', units='NA',
desc=('Specific to rbf. Sets the number of radial basis functions to use. '
'The value specified must be present in the Pointsets directory. '
'The default value is 246.'))
mepointset = traits.Int(argstr='-mepointset %d', units='NA',
desc=('Use a set of directions other than those in the scheme file for the deconvolution '
'kernel. The number refers to the number of directions on the unit sphere. '
'For example, "mepointset = 54" uses the directions in "camino/PointSets/Elec054.txt" '
'Use this option only if you told MESD to use a custom set of directions with the same '
'option. Otherwise, specify the scheme file with the "schemefile" attribute.'))
numpds = traits.Int(argstr='-numpds %d', units='NA',
desc='The largest number of peak directions to output in each voxel.')
noconsistencycheck = traits.Bool(argstr='-noconsistencycheck',
desc='Turns off the consistency check. The output shows all consistencies as true.')
searchradius = traits.Float(argstr='-searchradius %f', units='NA',
desc='The search radius in the peak finding algorithm. The default is 0.4 (cf. "density")')
density = traits.Int(argstr='-density %d', units='NA',
desc=('The number of randomly rotated icosahedra to use in constructing the set of points for '
'random sampling in the peak finding algorithm. Default is 1000, which works well for very '
'spiky maxent functions. For other types of function, it is reasonable to set the density '
'much lower and increase the search radius slightly, which speeds up the computation.'))
pointset = traits.Int(argstr='-pointset %d', units='NA',
desc=('To sample using an evenly distributed set of points instead. The integer can be '
'0, 1, ..., 7. Index 0 gives 1082 points, 1 gives 1922, 2 gives 3002, 3 gives 4322, '
'4 gives 5882, 5 gives 8672, 6 gives 12002, 7 gives 15872.'))
pdthresh = traits.Float(argstr='-pdthresh %f', units='NA',
desc=('Base threshold on the actual peak direction strength divided by the mean of the '
'function. The default is 1.0 (the peak must be equal or greater than the mean).'))
stdsfrommean = traits.Float(argstr='-stdsfrommean %f', units='NA',
desc=('This is the number of standard deviations of the function to be added to the '
'"pdthresh" attribute in the peak directions pruning.'))
class SFPeaksOutputSpec(TraitedSpec):
peaks = File(exists=True, desc='Peaks of the spherical functions.')
class SFPeaks(StdOutCommandLine):
"""
Finds the peaks of spherical functions.
This utility reads coefficients of the spherical functions and
outputs a list of peak directions of the function. It computes the
value of the function at each of a set of sample points. Then it
finds local maxima by finding all points at which the function is
larger than for any other point within a fixed search radius (the
default is 0.4). The utility then uses Powell's algorithm to
optimize the position of each local maximum. Finally the utility
removes duplicates and tiny peaks with function value smaller than
some threshold, which is the mean of the function plus some number
    of standard deviations. By default the program checks for
    consistency with a second set of starting points, but skips the
    optimization step. To speed up execution, you can turn off the
    consistency check by setting the noconsistencycheck flag to True.
By default, the utility constructs a set of sample points by
randomly rotating a unit icosahedron repeatedly (the default is 1000
times, which produces a set of 6000 points) and concatenating the
lists of vertices. The 'pointset = <index>' attribute can tell the
utility to use an evenly distributed set of points (index 0 gives
1082 points, 1 gives 1922, 2 gives 4322, 3 gives 8672, 4 gives 15872,
5 gives 32762, 6 gives 72032), which is quicker, because you can get
away with fewer points. We estimate that you can use a factor of 2.5
less evenly distributed points than randomly distributed points and
still expect similar performance levels.
The output for each voxel is:
- exitcode (inherited from the input data).
- ln(A(0))
- number of peaks found.
- flag for consistency with a repeated run (number of directions is
      the same and the directions are the same to within a threshold).
- mean(f).
- std(f).
- direction 1 (x, y, z, f, H00, H01, H10, H11).
- direction 2 (x, y, z, f, H00, H01, H10, H11).
- direction 3 (x, y, z, f, H00, H01, H10, H11).
H is the Hessian of f at the peak. It is the matrix: ::
[d^2f/ds^2 d^2f/dsdt]
[d^2f/dtds d^2f/dt^2]
= [H00 H01]
[H10 H11]
where s and t are orthogonal coordinates local to the peak.
By default the maximum number of peak directions output in each
voxel is three. If less than three directions are found, zeros are
output for later directions. The peaks are ordered by the value of
the function at the peak. If more than the maximum number of
directions are found only the strongest ones are output. The maximum
number can be changed setting the 'numpds' attribute.
The utility can read various kinds of spherical function, but must
be told what kind of function is input using the 'inputmodel'
attribute. The description of the 'inputmodel' attribute lists
additional information required by SFPeaks for each input model.
Example
---------
    Find the peaks of the spherical harmonic (sh) coefficients, for
    example those reconstructed by QBallMX and LinRecon.
>>> import nipype.interfaces.camino as cam
>>> sf_peaks = cam.SFPeaks()
>>> sf_peaks.inputs.in_file = 'A_recon_params.Bdouble'
>>> sf_peaks.inputs.inputmodel = 'sh'
>>> sf_peaks.inputs.order = 4
>>> sf_peaks.inputs.density = 100
>>> sf_peaks.inputs.searchradius = 1.0
>>> sf_peaks.run() # doctest: +SKIP
"""
_cmd = 'sfpeaks'
input_spec=SFPeaksInputSpec
output_spec=SFPeaksOutputSpec
def _list_outputs(self):
outputs = self.output_spec().get()
outputs['peaks'] = os.path.abspath(self._gen_outfilename())
return outputs
def _gen_outfilename(self):
        _, name, _ = split_filename(self.inputs.in_file)
return name + '_peaks.Bdouble'
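# A hedged wiring sketch (not part of the original module; the node names and
# workflow layout are illustrative assumptions): QBallMX's 'qmat' output feeds
# LinRecon's 'qball_mat' input inside a nipype Workflow.
#
#   import nipype.pipeline.engine as pe
#   qballmx = pe.Node(QBallMX(basistype='sh', order=4), name='qballmx')
#   qballmx.inputs.scheme_file = 'A.scheme'
#   linrecon = pe.Node(LinRecon(normalize=True), name='linrecon')
#   linrecon.inputs.in_file = 'SubjectA.Bfloat'
#   linrecon.inputs.scheme_file = 'A.scheme'
#   wf = pe.Workflow(name='qball')
#   wf.connect(qballmx, 'qmat', linrecon, 'qball_mat')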
|
mancoast/CPythonPyc_test | refs/heads/master | cpython/252_test_applesingle.py | 24 | # Copyright (C) 2003 Python Software Foundation
import unittest
import macostools
import Carbon.File
import MacOS
import os
import sys
from test import test_support
import struct
import applesingle
AS_MAGIC=0x00051600
AS_VERSION=0x00020000
dataforkdata = 'hello\r\0world\n'
resourceforkdata = 'goodbye\ncruel\0world\r'
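# Assemble a minimal AppleSingle container by hand: a header packing the magic
# number, version, a 16-byte filler and an entry count of 2, then two
# (entry id, offset, length) descriptors -- id 1 for the data fork, id 2 for
# the resource fork -- followed by the fork contents themselves.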
applesingledata = struct.pack(">ll16sh", AS_MAGIC, AS_VERSION, "foo", 2) + \
struct.pack(">llllll", 1, 50, len(dataforkdata),
2, 50+len(dataforkdata), len(resourceforkdata)) + \
dataforkdata + \
resourceforkdata
TESTFN2 = test_support.TESTFN + '2'
class TestApplesingle(unittest.TestCase):
def setUp(self):
fp = open(test_support.TESTFN, 'w')
fp.write(applesingledata)
fp.close()
def tearDown(self):
try:
os.unlink(test_support.TESTFN)
except:
pass
try:
os.unlink(TESTFN2)
except:
pass
def compareData(self, isrf, data):
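        # Read back either the resource fork (via MacOS.openrf) or the data
        # fork of TESTFN2 and compare what was read against the expected bytes.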
if isrf:
fp = MacOS.openrf(TESTFN2, '*rb')
else:
fp = open(TESTFN2, 'rb')
filedata = fp.read(1000)
self.assertEqual(data, filedata)
def test_applesingle(self):
try:
os.unlink(TESTFN2)
except:
pass
applesingle.decode(test_support.TESTFN, TESTFN2)
self.compareData(False, dataforkdata)
self.compareData(True, resourceforkdata)
def test_applesingle_resonly(self):
try:
os.unlink(TESTFN2)
except:
pass
applesingle.decode(test_support.TESTFN, TESTFN2, resonly=True)
self.compareData(False, resourceforkdata)
def test_main():
test_support.run_unittest(TestApplesingle)
if __name__ == '__main__':
test_main()
|
impredicative/nodeforge | refs/heads/master | src/nodeforge/gui/SmartInput.py | 1 | """
Smart Input Box
A heavily featured entry widget for processing user input.
It sends multiline text with a linefeed appended to the end.
Input is received through a blocking function created by raw_input().
Additional features include selection of the entire block when focused on.
The box is also selected when the user presses Enter, for easy deletion if needed.
Select all and tabbing are implemented.
TODO:
UP/DOWN history box
    Ctrl+Backspace deletion
Autocomplete
"""
from Tkinter import *
from Queue import Queue
class SmartInput(Entry, object):
historySize = 20
historyIndex = 0
def __init__(self, master=None, cnf={}, **kw):
Entry.__init__(self, master, cnf, takefocus=False, **kw)
self.alert = []
self.alertFuncs = []
self.history = []
self.bind('<Return>', self.onEnter)
self.bind('<BackSpace>', self.onBackspace)
self.bind('<FocusIn>', self.onFocus)
self.bind('<Tab>', self.onTab)
self.bind('a', self.onA)
self.bind('<Up>', self.onUp)
self.bind('<Down>', self.onDown)
def onUp(self, event):
if self.historyIndex < (len(self.history)-1):
# save what we have typed if we are leaving upwards
if self.historyIndex == 0:
self.addHistory()
self.historyIndex = self.historyIndex+1
self.setText(self.history[self.historyIndex])
self.selectAll()
def onDown(self, event):
if self.historyIndex > 0:
self.historyIndex = self.historyIndex-1
self.setText(self.history[self.historyIndex])
self.selectAll()
def onA(self, event):
"""
        Select all with Ctrl+A.
"""
if event.state == 4:
self.selectAll()
return "break"
def onTab(self, event):
self.deleteSelection()
self.insert(INSERT, '\t')
return "break"
def onFocus(self, event):
self.xview(END)
# select the whole line
if not self.selection_present():
self.selectAll()
def onBackspace(self, event):
"""
        TODO: deleting words with Ctrl+Backspace
"""
if event.state == 4:
return "break"
def onEnter(self, event):
self.announceInput(self.get())
self.selection_range(0,END)
self.addHistory()
self.historyIndex = 0
def addHistory(self):
"""
If the command was repeated, or blank, do not add.
Pop the last string if the queue is full.
Then add the new one to front.
"""
data = self.get()
if data == '':
return
elif len(self.history) != 0 and self.history[0] == data:
return
if len(self.history) == self.historySize:
self.history.pop()
self.history.insert(0, data)
def setText(self, text):
self.delete(0, END)
self.insert(0, text)
def selectAll(self):
self.selection_range(0,END)
def deleteSelection(self):
a,b = self.getSelectIndex()
self.delete(a,b)
def getSelectIndex(self):
a = self.index(ANCHOR)
b = self.index(INSERT)
return (min(a,b), max(a,b))
def announceInput(self, txt):
"""
Send the message to all the queues. Split the message up
by newlines
"""
txt = txt.split('\n')
for line in txt:
for func in self.alertFuncs:
func(line)
for queue in self.alert:
queue.put(line)
def callOnInput(self, func):
"""
When the user sends input, func will be called with the data.
"""
self.alertFuncs.append(func)
def raw_input(self, prompt=''):
"""
Replacement for raw_input. This returns a new function that
returns input sent from the box with an enter key.
Currently blocking.
TODO: somehow carry prompt info?
"""
newQueue = Queue()
self.alert.append(newQueue)
def requestItem(prompt=''):
out = newQueue.get()
return out
return requestItem |
benediktschmitt/sqlalchemy-materialized-paths | refs/heads/master | sqlalchemy_materialized_paths.py | 1 | #!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2017 Benedikt Schmitt <benedikt@benediktschmitt.de>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# third party
import sqlalchemy
from sqlalchemy import Column, String
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
__all__ = [
"MaterializedPathMixin",
"register_events",
"unregister_events"
]
#: The current version of this library.
VERSION = "0.0.0"
class MaterializedPathMixin(object):
"""
    A mixin for the materialized path pattern, which makes it possible to store
    hierarchical data in an SQL database.
    The model used with the mixin must have an integer-valued primary key column named *id*.
Examples for materialized paths:
* ".1" Root as parent
* ".1.2" Root -> 1 -> self
.. hint::
Don't forget to register the events for your model with :func:`register_events`.
"""
_mat_path = Column("materialized_path", String(), nullable=False, index=True)
def __init__(self, parent=None):
self.move(parent)
return None
@hybrid_property
def path(self):
"""The materialized path to the node. The path can only be changed
using the :meth:`move` method.
"""
return self._mat_path
@hybrid_property
def __id(self):
"""A reference to the primary key defined in the actual model."""
return self.id
def move(self, parent):
"""Changes the parent of this node to *parent*."""
parent_path = "" if parent is None else parent.path
assert not "INIT:" in parent_path
# Has the path been initialised yet?
init = self._mat_path is None or self._mat_path.startswith("INIT:")
# initialisation without id
if init and self.__id is None:
self._mat_path = "INIT:" + parent_path
# initialisation with id
elif init and self.__id is not None:
self._mat_path = parent_path + "." + str(self.__id)
# no initialisation
else:
if parent_path.startswith(self._mat_path):
raise ValueError("Can not move to a descendant.")
old_path = self._mat_path
new_path = parent_path + "." + str(self.__id)
for node in self.descendants:
node._mat_path = new_path + node._mat_path[len(old_path):]
self._mat_path = new_path
return None
@declared_attr
def descendants(cls):
"""All descendants (not inclusive)."""
from sqlalchemy.orm import remote, foreign, relationship
primaryjoin = remote(foreign(cls._mat_path)).like(cls._mat_path.concat(".%"))
descendants = relationship(
cls.__name__, viewonly=True, order_by=cls._mat_path,
primaryjoin=primaryjoin, lazy="dynamic", cascade="all,delete-orphan"
)
return descendants
@hybrid_method
def is_descendant_of(self, other):
"""Returns true if *other* is a real descendant of this node (not
inclusive).
"""
if other is None:
return True
return other.is_ancestor_of(self)
@declared_attr
def ancestors(cls):
"""All ancestors (not inclusive) in ascending tree order."""
from sqlalchemy.orm import remote, foreign, relationship
primaryjoin = cls._mat_path.like(remote(foreign(cls._mat_path)).concat(".%"))
ancestors = relationship(
cls.__name__, viewonly=True, order_by=cls._mat_path,
primaryjoin=primaryjoin, lazy="dynamic"
)
return ancestors
@hybrid_method
def is_ancestor_of(self, other):
"""Returns true if *other* is a real ancestors of this node (not
inclusive).
"""
if other is None:
return False
return other._mat_path.startswith(self._mat_path + ".")
@is_ancestor_of.expression
def is_ancestor_of(cls, other):
from sqlalchemy.sql import func, type_coerce
if other is None:
return False
return type_coerce(other._mat_path, String).like(cls._mat_path.concat(".%"))
@declared_attr
def children(cls):
"""All children (direct ancestors) of this node."""
from sqlalchemy.orm import remote, foreign, relationship
child_path1 = remote(foreign(cls._mat_path))
child_path2 = cls._mat_path.concat(".").concat(remote(foreign(cls.__id)))
primaryjoin = (child_path1 == child_path2)
children = relationship(
cls.__name__, viewonly=True, primaryjoin=primaryjoin, lazy="dynamic"
)
return children
@hybrid_method
def is_child_of(self, other):
"""Returns true if *other* is a real child of this node. (not
inclusive).
"""
other_path = "" if other is None else other._mat_path
return other_path + "." + str(self.__id) == self._mat_path
@is_child_of.expression
def is_child_of(cls, other):
from sqlalchemy.sql import type_coerce
other_path = "" if other is None else other._mat_path
return other_path + "." + type_coerce(cls.__id, String) == cls._mat_path
@hybrid_property
def has_children(self):
"""True if this node has any children."""
from sqlalchemy.orm import object_session
session = object_session(self)
return session.query(self.children.exists()).scalar()
@has_children.expression
def has_children(cls):
from sqlalchemy.orm import aliased
from sqlalchemy.sql import exists
alias = aliased(cls)
sql = exists().where(alias._mat_path.like(cls._mat_path.concat(".%")))
return sql
@property
def parent_id(self):
"""The id of the parent node."""
path = self._mat_path.split(".")
parent_id = int(path[-2]) if len(path) > 2 else None
return parent_id
@declared_attr
def parent(cls):
"""The parent of this node."""
from sqlalchemy.orm import remote, foreign, relationship
primaryjoin = remote(foreign(cls._mat_path)).concat(".").concat(cls.__id) == cls._mat_path
parent = relationship(
cls.__name__, viewonly=True, primaryjoin=primaryjoin, uselist=False, lazy="select"
)
return parent
@hybrid_method
def is_parent_of(self, other):
"""Returns true if this node is the parent of *other*."""
if other is None:
return False
return self._mat_path + "." + str(other.__id) == other._mat_path
@is_parent_of.expression
def is_parent_of(cls, other):
if other is None:
return False
return cls._mat_path.concat(".").concat(other.__id) == other._mat_path
@hybrid_property
def level(self):
"""Returns the depth of the node in the tree (number of parents). Nodes
with no parent have level 1.
"""
return self._mat_path.count(".")
@level.expression
def level(cls):
from sqlalchemy.sql import func
count = lambda col, seq: func.length(col) - func.length(func.replace(col, seq, ""))
return count(cls._mat_path, ".")
def after_insert(mapper, connection, path):
"""Complete the materialized path after insertion."""
from sqlalchemy.orm.attributes import set_committed_value
from sqlalchemy.sql import update
# Get the mapped table.
assert len(mapper.tables) == 1
table = mapper.tables[0]
# Nothing to do.
if not path.path.startswith("INIT:"):
return None
# Replace the temporary path with the real, complete path.
parent_path = path.path[len("INIT:"):]
real_path = parent_path + "." + str(path.id)
stmt = table.update().where(table.c.id == path.id).values(materialized_path=real_path)
connection.execute(stmt)
# Update the object without changing the history.
set_committed_value(path, "_mat_path", real_path)
assert path.path == real_path
return None
def register_events(table):
"""Registers SQLAlchemy events for :class:`MaterializedPathMixin` subclass *table*."""
assert issubclass(table, MaterializedPathMixin)
sqlalchemy.event.listen(table, "after_insert", after_insert)
return None
def unregister_events(table):
"""Unregisters the events which have previously been registered with
:func:`register_events`.
"""
assert issubclass(table, MaterializedPathMixin)
sqlalchemy.event.remove(table, "after_insert", after_insert)
return None
|
SerialShadow/SickRage | refs/heads/master | lib/github/MainClass.py | 35 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2013 AKFish <akfish@gmail.com> #
# Copyright 2013 Ed Jackson <ed.jackson@gmail.com> #
# Copyright 2013 Jonathan J Hunt <hunt@braincorporation.com> #
# Copyright 2013 Peter Golm <golm.peter@gmail.com> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import urllib
import pickle
from Requester import Requester
import AuthenticatedUser
import NamedUser
import Organization
import Gist
import github.PaginatedList
import Repository
import Legacy
import github.GithubObject
import HookDescription
import GitignoreTemplate
import Status
import StatusMessage
import RateLimit
DEFAULT_BASE_URL = "https://api.github.com"
DEFAULT_TIMEOUT = 10
DEFAULT_PER_PAGE = 30
class Github(object):
"""
    This is the main class you instantiate to access the Github API v3. Optional parameters allow different authentication methods.
"""
def __init__(self, login_or_token=None, password=None, base_url=DEFAULT_BASE_URL, timeout=DEFAULT_TIMEOUT, client_id=None, client_secret=None, user_agent='PyGithub/Python', per_page=DEFAULT_PER_PAGE, api_preview=False):
"""
:param login_or_token: string
:param password: string
:param base_url: string
:param timeout: integer
:param client_id: string
:param client_secret: string
:param user_agent: string
:param per_page: int
"""
assert login_or_token is None or isinstance(login_or_token, (str, unicode)), login_or_token
assert password is None or isinstance(password, (str, unicode)), password
assert isinstance(base_url, (str, unicode)), base_url
assert isinstance(timeout, (int, long)), timeout
assert client_id is None or isinstance(client_id, (str, unicode)), client_id
assert client_secret is None or isinstance(client_secret, (str, unicode)), client_secret
assert user_agent is None or isinstance(user_agent, (str, unicode)), user_agent
        assert isinstance(api_preview, bool)
self.__requester = Requester(login_or_token, password, base_url, timeout, client_id, client_secret, user_agent, per_page, api_preview)
def __get_FIX_REPO_GET_GIT_REF(self):
"""
:type: bool
"""
return self.__requester.FIX_REPO_GET_GIT_REF
def __set_FIX_REPO_GET_GIT_REF(self, value):
self.__requester.FIX_REPO_GET_GIT_REF = value
FIX_REPO_GET_GIT_REF = property(__get_FIX_REPO_GET_GIT_REF, __set_FIX_REPO_GET_GIT_REF)
def __get_per_page(self):
"""
:type: int
"""
return self.__requester.per_page
def __set_per_page(self, value):
self.__requester.per_page = value
# v2: Remove this property? Why should it be necessary to read/modify it after construction
per_page = property(__get_per_page, __set_per_page)
# v2: Provide a unified way to access values of headers of last response
# v2: (and add/keep ad hoc properties for specific useful headers like rate limiting, oauth scopes, etc.)
# v2: Return an instance of a class: using a tuple did not allow to add a field "resettime"
@property
def rate_limiting(self):
"""
First value is requests remaining, second value is request limit.
:type: (int, int)
"""
remaining, limit = self.__requester.rate_limiting
if limit < 0:
self.get_rate_limit()
return self.__requester.rate_limiting
@property
def rate_limiting_resettime(self):
"""
Unix timestamp indicating when rate limiting will reset.
:type: int
"""
if self.__requester.rate_limiting_resettime == 0:
self.get_rate_limit()
return self.__requester.rate_limiting_resettime
def get_rate_limit(self):
"""
Don't forget you can access the rate limit returned in headers of last Github API v3 response, by :attr:`github.MainClass.Github.rate_limiting` and :attr:`github.MainClass.Github.rate_limiting_resettime`.
:calls: `GET /rate_limit <http://developer.github.com/v3/rate_limit>`_
:rtype: :class:`github.RateLimit.RateLimit`
"""
headers, attributes = self.__requester.requestJsonAndCheck(
'GET',
'/rate_limit'
)
return RateLimit.RateLimit(self.__requester, headers, attributes, True)
@property
def oauth_scopes(self):
"""
:type: list of string
"""
return self.__requester.oauth_scopes
def get_user(self, login=github.GithubObject.NotSet):
"""
:calls: `GET /users/:user <http://developer.github.com/v3/users>`_ or `GET /user <http://developer.github.com/v3/users>`_
:param login: string
:rtype: :class:`github.NamedUser.NamedUser`
"""
assert login is github.GithubObject.NotSet or isinstance(login, (str, unicode)), login
if login is github.GithubObject.NotSet:
return AuthenticatedUser.AuthenticatedUser(self.__requester, {}, {"url": "/user"}, completed=False)
else:
headers, data = self.__requester.requestJsonAndCheck(
"GET",
"/users/" + login
)
return github.NamedUser.NamedUser(self.__requester, headers, data, completed=True)
def get_users(self, since=github.GithubObject.NotSet):
"""
:calls: `GET /users <http://developer.github.com/v3/users>`_
:param since: integer
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser`
"""
assert since is github.GithubObject.NotSet or isinstance(since, (int, long)), since
url_parameters = dict()
if since is not github.GithubObject.NotSet:
url_parameters["since"] = since
return github.PaginatedList.PaginatedList(
github.NamedUser.NamedUser,
self.__requester,
"/users",
url_parameters
)
def get_organization(self, login):
"""
:calls: `GET /orgs/:org <http://developer.github.com/v3/orgs>`_
:param login: string
:rtype: :class:`github.Organization.Organization`
"""
assert isinstance(login, (str, unicode)), login
headers, data = self.__requester.requestJsonAndCheck(
"GET",
"/orgs/" + login
)
return github.Organization.Organization(self.__requester, headers, data, completed=True)
def get_repo(self, full_name_or_id, lazy=True):
"""
:calls: `GET /repos/:owner/:repo <http://developer.github.com/v3/repos>`_ or `GET /repositories/:id <http://developer.github.com/v3/repos>`_
:rtype: :class:`github.Repository.Repository`
"""
assert isinstance(full_name_or_id, (str, unicode, int, long)), full_name_or_id
url_base = "/repositories/" if isinstance(full_name_or_id, int) or isinstance(full_name_or_id, long) else "/repos/"
url = "%s%s" % (url_base, full_name_or_id)
if lazy:
return Repository.Repository(self.__requester, {}, {"url": url}, completed=False)
headers, data = self.__requester.requestJsonAndCheck(
"GET",
"%s%s" % (url_base, full_name_or_id)
)
return Repository.Repository(self.__requester, headers, data, completed=True)
def get_repos(self, since=github.GithubObject.NotSet):
"""
:calls: `GET /repositories <http://developer.github.com/v3/repos/#list-all-public-repositories>`_
:param since: integer
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
"""
assert since is github.GithubObject.NotSet or isinstance(since, (int, long)), since
url_parameters = dict()
if since is not github.GithubObject.NotSet:
url_parameters["since"] = since
return github.PaginatedList.PaginatedList(
github.Repository.Repository,
self.__requester,
"/repositories",
url_parameters
)
def get_gist(self, id):
"""
:calls: `GET /gists/:id <http://developer.github.com/v3/gists>`_
:param id: string
:rtype: :class:`github.Gist.Gist`
"""
assert isinstance(id, (str, unicode)), id
headers, data = self.__requester.requestJsonAndCheck(
"GET",
"/gists/" + id
)
return github.Gist.Gist(self.__requester, headers, data, completed=True)
def get_gists(self):
"""
:calls: `GET /gists/public <http://developer.github.com/v3/gists>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Gist.Gist`
"""
return github.PaginatedList.PaginatedList(
github.Gist.Gist,
self.__requester,
"/gists/public",
None
)
def legacy_search_repos(self, keyword, language=github.GithubObject.NotSet):
"""
:calls: `GET /legacy/repos/search/:keyword <http://developer.github.com/v3/search/legacy>`_
:param keyword: string
:param language: string
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
"""
assert isinstance(keyword, (str, unicode)), keyword
assert language is github.GithubObject.NotSet or isinstance(language, (str, unicode)), language
args = {} if language is github.GithubObject.NotSet else {"language": language}
return Legacy.PaginatedList(
"/legacy/repos/search/" + urllib.quote_plus(keyword, safe='/%:><'),
args,
self.__requester,
"repositories",
Legacy.convertRepo,
github.Repository.Repository,
)
def legacy_search_users(self, keyword):
"""
:calls: `GET /legacy/user/search/:keyword <http://developer.github.com/v3/search/legacy>`_
:param keyword: string
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser`
"""
assert isinstance(keyword, (str, unicode)), keyword
return Legacy.PaginatedList(
"/legacy/user/search/" + urllib.quote_plus(keyword, safe='/%:><'),
{},
self.__requester,
"users",
Legacy.convertUser,
github.NamedUser.NamedUser,
)
def legacy_search_user_by_email(self, email):
"""
:calls: `GET /legacy/user/email/:email <http://developer.github.com/v3/search/legacy>`_
:param email: string
:rtype: :class:`github.NamedUser.NamedUser`
"""
assert isinstance(email, (str, unicode)), email
headers, data = self.__requester.requestJsonAndCheck(
"GET",
"/legacy/user/email/" + email
)
return github.NamedUser.NamedUser(self.__requester, headers, Legacy.convertUser(data["user"]), completed=False)
def search_repositories(self, query, sort=github.GithubObject.NotSet, order=github.GithubObject.NotSet, **qualifiers):
"""
:calls: `GET /search/repositories <http://developer.github.com/v3/search>`_
:param query: string
:param sort: string ('stars', 'forks', 'updated')
:param order: string ('asc', 'desc')
:param qualifiers: keyword dict query qualifiers
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
"""
assert isinstance(query, (str, unicode)), query
url_parameters = dict()
if sort is not github.GithubObject.NotSet: # pragma no branch (Should be covered)
assert sort in ('stars', 'forks', 'updated'), sort
url_parameters["sort"] = sort
if order is not github.GithubObject.NotSet: # pragma no branch (Should be covered)
assert order in ('asc', 'desc'), order
url_parameters["order"] = order
query_chunks = []
if query: # pragma no branch (Should be covered)
query_chunks.append(query)
for qualifier, value in qualifiers.items():
query_chunks.append("%s:%s" % (qualifier, value))
url_parameters["q"] = ' '.join(query_chunks)
assert url_parameters["q"], "need at least one qualifier"
return github.PaginatedList.PaginatedList(
github.Repository.Repository,
self.__requester,
"/search/repositories",
url_parameters
)
def search_users(self, query, sort=github.GithubObject.NotSet, order=github.GithubObject.NotSet, **qualifiers):
"""
:calls: `GET /search/users <http://developer.github.com/v3/search>`_
:param query: string
:param sort: string ('followers', 'repositories', 'joined')
:param order: string ('asc', 'desc')
:param qualifiers: keyword dict query qualifiers
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser`
"""
assert isinstance(query, (str, unicode)), query
url_parameters = dict()
if sort is not github.GithubObject.NotSet:
assert sort in ('followers', 'repositories', 'joined'), sort
url_parameters["sort"] = sort
if order is not github.GithubObject.NotSet:
assert order in ('asc', 'desc'), order
url_parameters["order"] = order
query_chunks = []
if query:
query_chunks.append(query)
for qualifier, value in qualifiers.items():
query_chunks.append("%s:%s" % (qualifier, value))
url_parameters["q"] = ' '.join(query_chunks)
assert url_parameters["q"], "need at least one qualifier"
return github.PaginatedList.PaginatedList(
github.NamedUser.NamedUser,
self.__requester,
"/search/users",
url_parameters
)
def search_issues(self, query, sort=github.GithubObject.NotSet, order=github.GithubObject.NotSet, **qualifiers):
"""
:calls: `GET /search/issues <http://developer.github.com/v3/search>`_
:param query: string
:param sort: string ('comments', 'created', 'updated')
:param order: string ('asc', 'desc')
:param qualifiers: keyword dict query qualifiers
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Issue.Issue`
"""
assert isinstance(query, (str, unicode)), query
url_parameters = dict()
if sort is not github.GithubObject.NotSet:
assert sort in ('comments', 'created', 'updated'), sort
url_parameters["sort"] = sort
if order is not github.GithubObject.NotSet:
assert order in ('asc', 'desc'), order
url_parameters["order"] = order
query_chunks = []
if query: # pragma no branch (Should be covered)
query_chunks.append(query)
for qualifier, value in qualifiers.items():
query_chunks.append("%s:%s" % (qualifier, value))
url_parameters["q"] = ' '.join(query_chunks)
assert url_parameters["q"], "need at least one qualifier"
return github.PaginatedList.PaginatedList(
github.Issue.Issue,
self.__requester,
"/search/issues",
url_parameters
)
def search_code(self, query, sort=github.GithubObject.NotSet, order=github.GithubObject.NotSet, **qualifiers):
"""
:calls: `GET /search/code <http://developer.github.com/v3/search>`_
:param query: string
:param sort: string ('indexed')
:param order: string ('asc', 'desc')
:param qualifiers: keyword dict query qualifiers
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.ContentFile.ContentFile`
"""
assert isinstance(query, (str, unicode)), query
url_parameters = dict()
if sort is not github.GithubObject.NotSet: # pragma no branch (Should be covered)
assert sort in ('indexed',), sort
url_parameters["sort"] = sort
if order is not github.GithubObject.NotSet: # pragma no branch (Should be covered)
assert order in ('asc', 'desc'), order
url_parameters["order"] = order
query_chunks = []
if query: # pragma no branch (Should be covered)
query_chunks.append(query)
for qualifier, value in qualifiers.items():
query_chunks.append("%s:%s" % (qualifier, value))
url_parameters["q"] = ' '.join(query_chunks)
assert url_parameters["q"], "need at least one qualifier"
return github.PaginatedList.PaginatedList(
github.ContentFile.ContentFile,
self.__requester,
"/search/code",
url_parameters
)
def render_markdown(self, text, context=github.GithubObject.NotSet):
"""
:calls: `POST /markdown <http://developer.github.com/v3/markdown>`_
:param text: string
:param context: :class:`github.Repository.Repository`
:rtype: string
"""
assert isinstance(text, (str, unicode)), text
assert context is github.GithubObject.NotSet or isinstance(context, github.Repository.Repository), context
post_parameters = {
"text": text
}
if context is not github.GithubObject.NotSet:
post_parameters["mode"] = "gfm"
post_parameters["context"] = context._identity
status, headers, data = self.__requester.requestJson(
"POST",
"/markdown",
input=post_parameters
)
return data
def get_hook(self, name):
"""
:calls: `GET /hooks/:name <http://developer.github.com/v3/repos/hooks/>`_
:param name: string
:rtype: :class:`github.HookDescription.HookDescription`
"""
assert isinstance(name, (str, unicode)), name
headers, attributes = self.__requester.requestJsonAndCheck(
"GET",
"/hooks/" + name
)
return HookDescription.HookDescription(self.__requester, headers, attributes, completed=True)
def get_hooks(self):
"""
:calls: `GET /hooks <http://developer.github.com/v3/repos/hooks/>`_
:rtype: list of :class:`github.HookDescription.HookDescription`
"""
headers, data = self.__requester.requestJsonAndCheck(
"GET",
"/hooks"
)
return [HookDescription.HookDescription(self.__requester, headers, attributes, completed=True) for attributes in data]
def get_gitignore_templates(self):
"""
:calls: `GET /gitignore/templates <http://developer.github.com/v3/gitignore>`_
:rtype: list of string
"""
headers, data = self.__requester.requestJsonAndCheck(
"GET",
"/gitignore/templates"
)
return data
def get_gitignore_template(self, name):
"""
:calls: `GET /gitignore/templates/:name <http://developer.github.com/v3/gitignore>`_
:rtype: :class:`github.GitignoreTemplate.GitignoreTemplate`
"""
assert isinstance(name, (str, unicode)), name
headers, attributes = self.__requester.requestJsonAndCheck(
"GET",
"/gitignore/templates/" + name
)
return GitignoreTemplate.GitignoreTemplate(self.__requester, headers, attributes, completed=True)
def get_emojis(self):
"""
:calls: `GET /emojis <http://developer.github.com/v3/emojis/>`_
:rtype: dictionary of type => url for emoji`
"""
headers, attributes = self.__requester.requestJsonAndCheck(
"GET",
"/emojis"
)
return attributes
def create_from_raw_data(self, klass, raw_data, headers={}):
"""
Creates an object from raw_data previously obtained by :attr:`github.GithubObject.GithubObject.raw_data`,
        and optionally headers previously obtained by :attr:`github.GithubObject.GithubObject.raw_headers`.
:param klass: the class of the object to create
:param raw_data: dict
:param headers: dict
:rtype: instance of class ``klass``
"""
return klass(self.__requester, headers, raw_data, completed=True)
def dump(self, obj, file, protocol=0):
"""
Dumps (pickles) a PyGithub object to a file-like object.
        Some effort is made to not pickle sensitive information like the Github credentials used in the :class:`Github` instance.
But NO EFFORT is made to remove sensitive information from the object's attributes.
:param obj: the object to pickle
:param file: the file-like object to pickle to
:param protocol: the `pickling protocol <http://docs.python.org/2.7/library/pickle.html#data-stream-format>`_
"""
pickle.dump((obj.__class__, obj.raw_data, obj.raw_headers), file, protocol)
def load(self, f):
"""
Loads (unpickles) a PyGithub object from a file-like object.
:param f: the file-like object to unpickle from
:return: the unpickled object
"""
return self.create_from_raw_data(*pickle.load(f))
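    # Round-trip sketch for dump()/load() (the file name and login are
    # illustrative):
    #
    #   with open("user.pkl", "wb") as fp:
    #       g.dump(g.get_user("jacquev6"), fp)
    #   with open("user.pkl", "rb") as fp:
    #       user = g.load(fp)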
def get_api_status(self):
"""
This doesn't work with a Github Enterprise installation, because it always targets https://status.github.com.
:calls: `GET /api/status.json <https://status.github.com/api>`_
:rtype: :class:`github.Status.Status`
"""
headers, attributes = self.__requester.requestJsonAndCheck(
"GET",
"/api/status.json",
cnx="status"
)
return Status.Status(self.__requester, headers, attributes, completed=True)
def get_last_api_status_message(self):
"""
This doesn't work with a Github Enterprise installation, because it always targets https://status.github.com.
:calls: `GET /api/last-message.json <https://status.github.com/api>`_
:rtype: :class:`github.StatusMessage.StatusMessage`
"""
headers, attributes = self.__requester.requestJsonAndCheck(
"GET",
"/api/last-message.json",
cnx="status"
)
return StatusMessage.StatusMessage(self.__requester, headers, attributes, completed=True)
def get_api_status_messages(self):
"""
This doesn't work with a Github Enterprise installation, because it always targets https://status.github.com.
:calls: `GET /api/messages.json <https://status.github.com/api>`_
:rtype: list of :class:`github.StatusMessage.StatusMessage`
"""
headers, data = self.__requester.requestJsonAndCheck(
"GET",
"/api/messages.json",
cnx="status"
)
return [StatusMessage.StatusMessage(self.__requester, headers, attributes, completed=True) for attributes in data]
|
gyoto/Gyoto | refs/heads/master | python/gyoto/metric.py | 1 | '''Gyoto::Metric namespace
In order to emulate the C++ Gyoto::Metric namespace, this module will
load gyoto.std and gyoto.lorene (if available) and expose all Metrics
in here.
'''
import gyoto._namespaces as _namespaces
from gyoto.core import Metric as Generic
__all__ = _namespaces.make_namespace(Generic, globals())
del _namespaces
Complex=ComplexMetric
import gyoto.core
import numpy
def jacobian_numerical(metric, pos, epsilon=1e-6):
'''Estimate the Jacobian matrix of a metric numerically
This function is intended for debugging using. For production,
using the method gyoto.core.Metric.jacobian is preferred.
If `metric' is an instance of a subclass of gyoto.core.Metric,
jacobian_numerical(metric, pos) should yield the same as
metric.jacobian(pos) within numerical errors.
Keyword arguments:
metric -- the gyoto.core.Metric instance to work on
pos -- the coordinates at which to estimate the Christoffel symbols
    epsilon -- the step used to estimate the derivatives (default 1e-6)
'''
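    # Forward differences: perturb one coordinate at a time and difference the
    # covariant metric, so delta[alpha] approximates d(g_mu_nu)/d(x^alpha).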
delta=numpy.empty((4, 4, 4))
    posa = numpy.asarray(pos, dtype=float)  # float dtype so the epsilon perturbation is not truncated
posb = posa.copy()
ga = metric.gmunu(posa)
for alpha in range(4):
posb[alpha] += epsilon
gb = metric.gmunu(posb)
delta[alpha, :, :] = (gb-ga)/epsilon
posb[alpha]=posa[alpha]
return delta
def christoffel_numerical(metric, pos, epsilon=1e-6):
'''Estimate the Christoffel symbols of a metric numerically
    This function is intended for debugging use. It is called by
    gyoto.metric.check_christoffel for this purpose. For production,
    the method gyoto.core.Metric.christoffel is preferred.
If `metric' is an instance of a subclass of gyoto.core.Metric,
christoffel_numerical(metric, pos) should yield the same as
metric.christoffel(pos) within numerical errors.
This function estimates the Christoffel symbols by estimating
numerically the partial derivatives of the metric coefficients
(given by metric.gmunu(pos)) and inverting (also numerically) the
covariant metric coefficients matrix as pos.
Keyword arguments:
metric -- the gyoto.core.Metric instance to work on
pos -- the coordinates at which to estimate the Christoffel symbols
    epsilon -- the step used to estimate the derivatives (default 1e-6)
'''
Gamma=numpy.empty((4, 4, 4))
delta=jacobian_numerical(metric, pos, epsilon=epsilon)
gup=metric.gmunu_up(pos)
for i in range(4):
for k in range(4):
for l in range(4):
                Gamma[i, k, l] = (
                    0.5*gup[i, :]*
                    (
                        delta[l, :, k]
                        + delta[k, :, l]
                        - delta[:, k, l]
                    )).sum()
return Gamma
def check_christoffel(metric, poslist=None, epsilon=1e-6, abstol=1e-6, reltol=1e-6):
'''Check the christoffel method of a gyoto.core.Metric subclass
This method compares the Christoffel symbols of metric as given by
metric.christoffel(pos) to those numerically estimated by
christoffel_numerical(metric, pos). It raises an error if the
difference is too large.
The difference between the two estimates should always be smaller
than abstol.
In addition, if the value of the symbol is larger than abstol, the
relative error should be smaller than reltol.
Keyword arguments:
metric -- one of:
1- the gyoto.core.Metric instance to work on
(e.g. gyoto.std.KerrBL());
2- a gyoto.core.Metric subclass
(e.g. gyoto.std.KerrBL);
3- the name of a gyoto.core.metric subclass
(e.g. 'KerrBL').
poslist -- a Python list of 4-coordinates at which to check the
Christoffel symbols. By default, a small number of
arbitrary positions that depend on whether the
coordinate system is spherical or Cartesian and work
               well for the Kerr metric are used.
    epsilon -- the step used to estimate the derivatives (default 1e-6)
abstol -- the absolute tolerance
    reltol -- the relative tolerance
'''
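    # Example invocations (a sketch; they assume gyoto.std is importable and
    # provides the KerrBL metric):
    #
    #   check_christoffel('KerrBL')
    #   check_christoffel(gyoto.std.KerrBL, reltol=1e-5)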
if isinstance(metric, str):
metric=gyoto.core.Metric(metric)
elif isinstance(metric, type):
metric=metric()
if poslist is None:
if metric.coordKind()==gyoto.core.GYOTO_COORDKIND_SPHERICAL:
poslist=[
(0., 6., numpy.pi/2, 0.),
(100., 50, numpy.pi/4, numpy.pi/6.)
]
elif metric.coordKind()==gyoto.core.GYOTO_COORDKIND_CARTESIAN:
poslist=[
(0., 6., 0., 0.),
(100., 50., 30., 50),
(1000., 0., 0., 40.)
]
else:
raise ValueError('Unknown coordinate kind')
for pos in poslist:
G=metric.christoffel(pos)
Gn=christoffel_numerical(metric, pos, epsilon)
for a in range(4):
for m in range(4):
for n in range(4):
e=numpy.abs(G[a, m, n]-Gn[a, m, n])
assert e <= abstol, "absolute error {} larger than {} at {} for kind={}, alpha={}, mu={}, nu={}, val={}".format(e, abstol, pos, metric.kind(), a, m, n, G[a, m, n])
avg=numpy.abs(0.5*(G[a, m, n]-Gn[a, m, n]))
if avg > abstol:
assert e/avg <= reltol, "relative error {} larger than {} at {} for kind={}, alpha={}, mu={}, nu={}".format(e/avg, reltol, pos, metric.kind(), a, m, n)
|
bdrung/audacity | refs/heads/master | lib-src/lv2/suil/waflib/Tools/c_tests.py | 330 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
from waflib import Task
from waflib.Configure import conf
from waflib.TaskGen import feature,before_method,after_method
import sys
LIB_CODE='''
#ifdef _MSC_VER
#define testEXPORT __declspec(dllexport)
#else
#define testEXPORT
#endif
testEXPORT int lib_func(void) { return 9; }
'''
MAIN_CODE='''
#ifdef _MSC_VER
#define testEXPORT __declspec(dllimport)
#else
#define testEXPORT
#endif
testEXPORT int lib_func(void);
int main(int argc, char **argv) {
(void)argc; (void)argv;
return !(lib_func() == 9);
}
'''
@feature('link_lib_test')
@before_method('process_source')
def link_lib_test_fun(self):
def write_test_file(task):
task.outputs[0].write(task.generator.code)
rpath=[]
if getattr(self,'add_rpath',False):
rpath=[self.bld.path.get_bld().abspath()]
mode=self.mode
m='%s %s'%(mode,mode)
ex=self.test_exec and'test_exec'or''
bld=self.bld
bld(rule=write_test_file,target='test.'+mode,code=LIB_CODE)
bld(rule=write_test_file,target='main.'+mode,code=MAIN_CODE)
bld(features='%sshlib'%m,source='test.'+mode,target='test')
bld(features='%sprogram %s'%(m,ex),source='main.'+mode,target='app',use='test',rpath=rpath)
@conf
def check_library(self,mode=None,test_exec=True):
if not mode:
mode='c'
if self.env.CXX:
mode='cxx'
self.check(compile_filename=[],features='link_lib_test',msg='Checking for libraries',mode=mode,test_exec=test_exec,)
INLINE_CODE='''
typedef int foo_t;
static %s foo_t static_foo () {return 0; }
%s foo_t foo () {
return 0;
}
'''
INLINE_VALUES=['inline','__inline__','__inline']
@conf
def check_inline(self,**kw):
self.start_msg('Checking for inline')
if not'define_name'in kw:
kw['define_name']='INLINE_MACRO'
if not'features'in kw:
if self.env.CXX:
kw['features']=['cxx']
else:
kw['features']=['c']
for x in INLINE_VALUES:
kw['fragment']=INLINE_CODE%(x,x)
try:
self.check(**kw)
except self.errors.ConfigurationError:
continue
else:
self.end_msg(x)
if x!='inline':
self.define('inline',x,quote=False)
return x
self.fatal('could not use inline functions')
LARGE_FRAGMENT='''#include <unistd.h>
int main(int argc, char **argv) {
(void)argc; (void)argv;
return !(sizeof(off_t) >= 8);
}
'''
@conf
def check_large_file(self,**kw):
if not'define_name'in kw:
kw['define_name']='HAVE_LARGEFILE'
if not'execute'in kw:
kw['execute']=True
if not'features'in kw:
if self.env.CXX:
kw['features']=['cxx','cxxprogram']
else:
kw['features']=['c','cprogram']
kw['fragment']=LARGE_FRAGMENT
kw['msg']='Checking for large file support'
ret=True
try:
if self.env.DEST_BINFMT!='pe':
ret=self.check(**kw)
except self.errors.ConfigurationError:
pass
else:
if ret:
return True
kw['msg']='Checking for -D_FILE_OFFSET_BITS=64'
kw['defines']=['_FILE_OFFSET_BITS=64']
try:
ret=self.check(**kw)
except self.errors.ConfigurationError:
pass
else:
self.define('_FILE_OFFSET_BITS',64)
return ret
self.fatal('There is no support for large files')
ENDIAN_FRAGMENT='''
short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
int use_ascii (int i) {
return ascii_mm[i] + ascii_ii[i];
}
short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 };
int use_ebcdic (int i) {
return ebcdic_mm[i] + ebcdic_ii[i];
}
extern int foo;
'''
class grep_for_endianness(Task.Task):
color='PINK'
def run(self):
txt=self.inputs[0].read(flags='rb').decode('iso8859-1')
if txt.find('LiTTleEnDian')>-1:
self.generator.tmp.append('little')
elif txt.find('BIGenDianSyS')>-1:
self.generator.tmp.append('big')
else:
return -1
@feature('grep_for_endianness')
@after_method('process_source')
def grep_for_endianness_fun(self):
self.create_task('grep_for_endianness',self.compiled_tasks[0].outputs[0])
@conf
def check_endianness(self):
tmp=[]
def check_msg(self):
return tmp[0]
self.check(fragment=ENDIAN_FRAGMENT,features='c grep_for_endianness',msg="Checking for endianness",define='ENDIANNESS',tmp=tmp,okmsg=check_msg)
return tmp[0]
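# How the fragment above works: the short int initializers are chosen so
# that the compiled object file contains the byte string 'BIGenDianSyS'
# on big-endian targets and 'LiTTleEnDian' on little-endian ones (e.g.
# 0x4249 is stored as the bytes 'B','I' big-endian but 'I','B'
# little-endian); grep_for_endianness then just searches the binary for
# either marker, so no test program ever has to be executed. The same
# idea in pure Python, as an illustration only:
#
#     import struct
#     assert struct.pack('>H', 0x4249) == 'BI'   # big-endian layout
#     assert struct.pack('<H', 0x4249) == 'IB'   # little-endian layout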
|
eneldoserrata/marcos_openerp | refs/heads/master | addons/web/__init__.py | 66 | import http
import controllers
import cli
wsgi_postload = http.wsgi_postload
|
daviddoria/PointGraphsPhase1 | refs/heads/PointGraphsPhase1 | Examples/GUI/Python/TransformWithBoxWidget.py | 15 | #!/usr/bin/env python
# Demonstrate how to use the vtkBoxWidget to translate, scale, and
# rotate actors. The basic idea is that the box widget controls an
# actor's transform. A callback which modifies the transform is
# invoked as the box widget is manipulated.
import vtk
# Start by creating some simple geometry; in this case a mace.
sphere = vtk.vtkSphereSource()
cone = vtk.vtkConeSource()
glyph = vtk.vtkGlyph3D()
glyph.SetInputConnection(sphere.GetOutputPort())
glyph.SetSource(cone.GetOutput())
glyph.SetVectorModeToUseNormal()
glyph.SetScaleModeToScaleByVector()
glyph.SetScaleFactor(0.25)
appendData = vtk.vtkAppendPolyData()
appendData.AddInput(glyph.GetOutput())
appendData.AddInput(sphere.GetOutput())
maceMapper = vtk.vtkPolyDataMapper()
maceMapper.SetInputConnection(appendData.GetOutputPort())
maceActor = vtk.vtkLODActor()
maceActor.SetMapper(maceMapper)
maceActor.VisibilityOn()
# Create the RenderWindow, Renderer and both Actors
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# The box widget observes the events invoked by the render window
# interactor. These events come from user interaction in the render
# window.
boxWidget = vtk.vtkBoxWidget()
boxWidget.SetInteractor(iren)
boxWidget.SetPlaceFactor(1.25)
# Add the actors to the renderer, set the background and window size.
ren.AddActor(maceActor)
ren.SetBackground(0.1, 0.2, 0.4)
renWin.SetSize(300, 300)
# As the box widget is interacted with, it produces a transformation
# matrix that is set on the actor.
t = vtk.vtkTransform()
def TransformActor(obj, event):
global t, maceActor
obj.GetTransform(t)
maceActor.SetUserTransform(t)
# Place the interactor initially. The actor is used to place and scale
# the interactor. An observer is added to the box widget to watch for
# interaction events. This event is captured and used to set the
# transformation matrix of the actor.
boxWidget.SetProp3D(maceActor)
boxWidget.PlaceWidget()
boxWidget.AddObserver("InteractionEvent", TransformActor)
iren.Initialize()
renWin.Render()
iren.Start()
|
EduPepperPDTesting/pepper2013-testing | refs/heads/www0 | lms/djangoapps/administration/migrations/0008_auto__chg_field_pepregtraining_date_create__chg_field_pepregtraining_d.py | 1 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'PepRegTraining.date_create'
db.alter_column('pepreg_training', 'date_create', self.gf('django.db.models.fields.DateField')())
# Changing field 'PepRegTraining.date_modify'
db.alter_column('pepreg_training', 'date_modify', self.gf('django.db.models.fields.DateField')())
# Changing field 'PepRegTraining.training_date'
db.alter_column('pepreg_training', 'training_date', self.gf('django.db.models.fields.DateField')())
# Changing field 'PepRegTraining.credits'
db.alter_column('pepreg_training', 'credits', self.gf('django.db.models.fields.FloatField')())
# Changing field 'PepRegTraining.training_time'
db.alter_column('pepreg_training', 'training_time', self.gf('django.db.models.fields.TimeField')())
def backwards(self, orm):
# Changing field 'PepRegTraining.date_create'
db.alter_column('pepreg_training', 'date_create', self.gf('django.db.models.fields.DateField')(auto_now_add=True))
# Changing field 'PepRegTraining.date_modify'
db.alter_column('pepreg_training', 'date_modify', self.gf('django.db.models.fields.DateField')(auto_now_add=True))
# Changing field 'PepRegTraining.training_date'
db.alter_column('pepreg_training', 'training_date', self.gf('django.db.models.fields.DateField')(auto_now_add=True))
# Changing field 'PepRegTraining.credits'
db.alter_column('pepreg_training', 'credits', self.gf('django.db.models.fields.IntegerField')())
# Changing field 'PepRegTraining.training_time'
db.alter_column('pepreg_training', 'training_time', self.gf('django.db.models.fields.TimeField')(auto_now_add=True))
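# Illustrative usage (South, assuming the standard manage.py layout):
# this migration is applied with
#     ./manage.py migrate administration 0008
# and reversed by migrating back down to 0007.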
models = {
'administration.adjustmenttimelog': {
'Meta': {'object_name': 'AdjustmentTimeLog', 'db_table': "'adjustment_time_log'"},
'adjustment_time': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'admin_email': ('django.db.models.fields.CharField', [], {'max_length': '75', 'db_index': 'True'}),
'comments': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'db_index': 'True'}),
'course_number': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'db_index': 'True'}),
'create_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_index': 'True'}),
'user_email': ('django.db.models.fields.CharField', [], {'max_length': '75', 'db_index': 'True'}),
'user_id': ('django.db.models.fields.IntegerField', [], {'max_length': '11'})
},
'administration.author': {
'Meta': {'object_name': 'Author', 'db_table': "'author'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'administration.certificate': {
'Meta': {'object_name': 'Certificate', 'db_table': "'certificate'"},
'association': ('django.db.models.fields.IntegerField', [], {}),
'association_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['administration.CertificateAssociationType']"}),
'certificate_blob': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'certificate_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'readonly': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'administration.certificateassociationtype': {
'Meta': {'object_name': 'CertificateAssociationType', 'db_table': "'certificate_association_type'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'administration.emailtask': {
'Meta': {'object_name': 'EmailTask', 'db_table': "'admin_email_task'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'process_emails': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'success_emails': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'total_emails': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'update_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': '0', 'to': "orm['auth.User']"})
},
'administration.emailtasklog': {
'Meta': {'object_name': 'EmailTaskLog', 'db_table': "'admin_email_task_log'"},
'district_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '75', 'db_index': 'True'}),
'error': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'send_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['administration.EmailTask']", 'on_delete': 'models.PROTECT'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_index': 'True'})
},
'administration.filterfavorite': {
'Meta': {'object_name': 'FilterFavorite', 'db_table': "'admin_filter_favorite'"},
'filter_json': ('django.db.models.fields.CharField', [], {'max_length': '4096', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '150', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'administration.hangoutpermissions': {
'Meta': {'object_name': 'HangoutPermissions', 'db_table': "'hangout_permissions'"},
'district': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.District']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'permission': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'administration.importtask': {
'Meta': {'object_name': 'ImportTask', 'db_table': "'admin_import_task'"},
'filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'process_lines': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'success_lines': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'total_lines': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'update_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': '0', 'to': "orm['auth.User']"})
},
'administration.importtasklog': {
'Meta': {'object_name': 'ImportTaskLog', 'db_table': "'admin_import_task_log'"},
'create_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'error': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'import_data': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
'line': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['administration.ImportTask']", 'on_delete': 'models.PROTECT'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_index': 'True'})
},
'administration.pepreginstructor': {
'Meta': {'object_name': 'PepRegInstructor', 'db_table': "'pepreg_instructor'"},
'date_create': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instructor': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['auth.User']"}),
'training': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['administration.PepRegTraining']"}),
'user_create': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['auth.User']"})
},
'administration.pepregstudent': {
'Meta': {'object_name': 'PepRegStudent', 'db_table': "'pepreg_student'"},
'date_create': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modify': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'student': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['auth.User']"}),
'student_credit': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'student_status': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'training': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['administration.PepRegTraining']"}),
'user_create': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['auth.User']"}),
'user_modify': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['auth.User']"})
},
'administration.pepregtraining': {
'Meta': {'object_name': 'PepRegTraining', 'db_table': "'pepreg_training'"},
'allow_attendance': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'allow_registration': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'allow_validation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'attendancel_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'classroom': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'credits': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'date_create': ('django.db.models.fields.DateField', [], {}),
'date_modify': ('django.db.models.fields.DateField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'district': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.District']"}),
'geo_location': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'geo_props': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_registration': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'pepper_course': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'training_date': ('django.db.models.fields.DateField', [], {}),
'training_time': ('django.db.models.fields.TimeField', [], {}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'user_create': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['auth.User']"}),
'user_modify': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['auth.User']"})
},
'administration.timereportperm': {
'Meta': {'object_name': 'TimeReportPerm', 'db_table': "'time_report_perm'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': '0', 'to': "orm['auth.User']"})
},
'administration.timereporttask': {
'Meta': {'object_name': 'TimeReportTask', 'db_table': "'admin_time_report_task'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'process_num': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'success_num': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'total_num': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'update_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': '0', 'to': "orm['auth.User']"})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'student.district': {
'Meta': {'object_name': 'District', 'db_table': "'district'"},
'code': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'unique': 'True', 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'state': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.State']", 'on_delete': 'models.PROTECT'})
},
'student.state': {
'Meta': {'object_name': 'State', 'db_table': "'state'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'so': ('django.db.models.fields.IntegerField', [], {})
}
}
complete_apps = ['administration'] |
ernesto-g/micropython | refs/heads/master | tests/micropython/heapalloc.py | 52 | # check that we can do certain things without allocating heap memory
import gc
def f1(a):
print(a)
def f2(a, b=2):
print(a, b)
def f3(a, b, c, d):
x1 = x2 = a
x3 = x4 = b
x5 = x6 = c
x7 = x8 = d
print(x1, x3, x5, x7, x2 + x4 + x6 + x8)
global_var = 1
def test():
global global_var
global_var = 2 # set an existing global variable
for i in range(2): # for loop
f1(i) # function call
f1(i * 2 + 1) # binary operation with small ints
f1(a=i) # keyword arguments
f2(i) # default arg (second one)
f2(i, i) # 2 args
f3(1, 2, 3, 4) # function with lots of local state
# call test() with heap allocation disabled and all memory used up
gc.disable()
try:
while True:
'a'.lower # allocates 1 cell for boundmeth
except MemoryError:
pass
test()
gc.enable()
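# Because the loop above exhausted the heap before test() ran, any heap
# allocation inside test() would have raised MemoryError; reaching
# gc.enable() therefore demonstrates that every operation exercised by
# test() is allocation-free.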
|
sergiocorato/bank-payment | refs/heads/8.0 | account_banking/__openerp__.py | 10 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2009 EduSense BV (<http://www.edusense.nl>).
# (C) 2011 Therp BV (<http://therp.nl>).
# (C) 2011 Smile (<http://smile.fr>).
#
# All other contributions are (C) by their respective contributors
#
# All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Banking',
'version': '0.4',
'license': 'AGPL-3',
'author': "Banking addons community,Odoo Community Association (OCA)",
'website': 'https://launchpad.net/banking-addons',
'category': 'Banking addons',
'depends': [
'account_voucher',
],
'data': [
'security/ir.model.access.csv',
'data/account_banking_data.xml',
'wizard/bank_import_view.xml',
'account_banking_view.xml',
'wizard/banking_transaction_wizard.xml',
'wizard/link_partner.xml',
'workflow/account_invoice.xml',
],
'js': [
'static/src/js/account_banking.js',
],
'description': '''
Module to do banking.
This module tries to combine all current banking import and export
schemes. The rationale is that it is quite common to have foreign
bank account numbers next to national bank account numbers. The current
approach, which hides the national banking interface schemes in the
l10n_xxx modules, makes it very difficult to use them simultaneously.
A more banking-oriented approach seems more logical and cleaner.
Changes to default OpenERP:
* Puts focus on the real life messaging with banks:
+ Bank statement lines upgraded to independent bank transactions.
+ Banking statements have no special accountancy meaning, they're just
message envelopes for a number of bank transactions.
+ Bank statements can be either encoded by hand to reflect the document
version of Bank Statements, or created as an optional side effect of
importing Bank Transactions.
* Preparations for SEPA:
+ IBAN accounts are the standard in the SEPA countries
+ local accounts are derived from SEPA (excluding Turkey) but are
considered to be identical to the corresponding SEPA account.
+ Banks are identified with either Country + Bank code + Branch code or
BIC
+ Each bank can have its own pace in introducing SEPA into their
communication with their customers.
+ National online databases can be used to convert BBANs to IBANs.
+ The SWIFT database is consulted for bank information.
* Adds dropin extensible import facility for bank communication in:
- Drop-in input parser development.
- MultiBank (NL) format transaction files available as
account_banking_nl_multibank,
* Extends payments for digital banking:
+ Adapted workflow in payments to reflect banking operations
+ Relies on account_payment mechanics to extend with export generators.
- ClieOp3 (NL) payment and direct debit orders files available as
account_banking_nl_clieop
* Additional features for the import/export mechanism:
+ Automatic matching and creation of bank accounts, banks and partners,
during import of statements.
+ Automatic matching with invoices and payments.
+ Sound import mechanism, allowing multiple imports of the same
transactions repeated over multiple files.
+ Journal configuration per bank account.
+ Business logic and format parsing strictly separated to ease the
development of new parsers.
+ No special configuration needed for the parsers, new parsers are
recognized and made available at server (re)start.
''',
'installable': False,
'auto_install': False,
}
|
PokemonGoF/PokemonGo-Bot-Desktop | refs/heads/development | build/pywin/Lib/urllib.py | 8 | """Open an arbitrary URL.
See the following document for more info on URLs:
"Names and Addresses, URIs, URLs, URNs, URCs", at
http://www.w3.org/pub/WWW/Addressing/Overview.html
See also the HTTP spec (from which the error codes are derived):
"HTTP - Hypertext Transfer Protocol", at
http://www.w3.org/pub/WWW/Protocols/
Related standards and specs:
- RFC1808: the "relative URL" spec. (authoritative status)
- RFC1738 - the "URL standard". (authoritative status)
- RFC1630 - the "URI spec". (informational status)
The object returned by URLopener().open(file) will differ per
protocol. All you know is that it has methods read(), readline(),
readlines(), fileno(), close() and info(). The read*(), fileno()
and close() methods work like those of open files.
The info() method returns a mimetools.Message object which can be
used to query various info about the object, if available.
(mimetools.Message objects are queried with the getheader() method.)
"""
import string
import socket
import os
import time
import sys
import base64
import re
from urlparse import urljoin as basejoin
__all__ = ["urlopen", "URLopener", "FancyURLopener", "urlretrieve",
"urlcleanup", "quote", "quote_plus", "unquote", "unquote_plus",
"urlencode", "url2pathname", "pathname2url", "splittag",
"localhost", "thishost", "ftperrors", "basejoin", "unwrap",
"splittype", "splithost", "splituser", "splitpasswd", "splitport",
"splitnport", "splitquery", "splitattr", "splitvalue",
"getproxies"]
__version__ = '1.17' # XXX This version is not always updated :-(
MAXFTPCACHE = 10 # Trim the ftp cache beyond this size
# Helper for non-unix systems
if os.name == 'nt':
from nturl2path import url2pathname, pathname2url
elif os.name == 'riscos':
from rourl2path import url2pathname, pathname2url
else:
def url2pathname(pathname):
"""OS-specific conversion from a relative URL of the 'file' scheme
to a file system path; not recommended for general use."""
return unquote(pathname)
def pathname2url(pathname):
"""OS-specific conversion from a file system path to a relative URL
of the 'file' scheme; not recommended for general use."""
return quote(pathname)
# This really consists of two pieces:
# (1) a class which handles opening of all sorts of URLs
# (plus assorted utilities etc.)
# (2) a set of functions for parsing URLs
# XXX Should these be separated out into different modules?
# Shortcut for basic usage
_urlopener = None
def urlopen(url, data=None, proxies=None, context=None):
"""Create a file-like object for the specified URL to read from."""
from warnings import warnpy3k
warnpy3k("urllib.urlopen() has been removed in Python 3.0 in "
"favor of urllib2.urlopen()", stacklevel=2)
global _urlopener
if proxies is not None or context is not None:
opener = FancyURLopener(proxies=proxies, context=context)
elif not _urlopener:
opener = FancyURLopener()
_urlopener = opener
else:
opener = _urlopener
if data is None:
return opener.open(url)
else:
return opener.open(url, data)
def urlretrieve(url, filename=None, reporthook=None, data=None, context=None):
global _urlopener
if context is not None:
opener = FancyURLopener(context=context)
elif not _urlopener:
_urlopener = opener = FancyURLopener()
else:
opener = _urlopener
return opener.retrieve(url, filename, reporthook, data)
def urlcleanup():
if _urlopener:
_urlopener.cleanup()
_safe_quoters.clear()
ftpcache.clear()
# check for SSL
try:
import ssl
except:
_have_ssl = False
else:
_have_ssl = True
# exception raised when downloaded size does not match content-length
class ContentTooShortError(IOError):
def __init__(self, message, content):
IOError.__init__(self, message)
self.content = content
ftpcache = {}
class URLopener:
"""Class to open URLs.
This is a class rather than just a subroutine because we may need
more than one set of global protocol-specific options.
Note -- this is a base class for those who don't want the
automatic handling of errors type 302 (relocated) and 401
(authorization needed)."""
__tempfiles = None
version = "Python-urllib/%s" % __version__
# Constructor
def __init__(self, proxies=None, context=None, **x509):
if proxies is None:
proxies = getproxies()
assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
self.proxies = proxies
self.key_file = x509.get('key_file')
self.cert_file = x509.get('cert_file')
self.context = context
self.addheaders = [('User-Agent', self.version)]
self.__tempfiles = []
self.__unlink = os.unlink # See cleanup()
self.tempcache = None
# Undocumented feature: if you assign {} to tempcache,
# it is used to cache files retrieved with
# self.retrieve(). This is not enabled by default
# since it does not work for changing documents (and I
# haven't got the logic to check expiration headers
# yet).
self.ftpcache = ftpcache
# Undocumented feature: you can use a different
# ftp cache by assigning to the .ftpcache member;
# in case you want logically independent URL openers
# XXX This is not threadsafe. Bah.
def __del__(self):
self.close()
def close(self):
self.cleanup()
def cleanup(self):
# This code sometimes runs when the rest of this module
# has already been deleted, so it can't use any globals
# or import anything.
if self.__tempfiles:
for file in self.__tempfiles:
try:
self.__unlink(file)
except OSError:
pass
del self.__tempfiles[:]
if self.tempcache:
self.tempcache.clear()
def addheader(self, *args):
"""Add a header to be used by the HTTP interface only
e.g. u.addheader('Accept', 'sound/basic')"""
self.addheaders.append(args)
# External interface
def open(self, fullurl, data=None):
"""Use URLopener().open(file) instead of open(file, 'r')."""
fullurl = unwrap(toBytes(fullurl))
# percent encode url, fixing lame server errors for e.g, like space
# within url paths.
fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
if self.tempcache and fullurl in self.tempcache:
filename, headers = self.tempcache[fullurl]
fp = open(filename, 'rb')
return addinfourl(fp, headers, fullurl)
urltype, url = splittype(fullurl)
if not urltype:
urltype = 'file'
if urltype in self.proxies:
proxy = self.proxies[urltype]
urltype, proxyhost = splittype(proxy)
host, selector = splithost(proxyhost)
url = (host, fullurl) # Signal special case to open_*()
else:
proxy = None
name = 'open_' + urltype
self.type = urltype
name = name.replace('-', '_')
if not hasattr(self, name):
if proxy:
return self.open_unknown_proxy(proxy, fullurl, data)
else:
return self.open_unknown(fullurl, data)
try:
if data is None:
return getattr(self, name)(url)
else:
return getattr(self, name)(url, data)
except socket.error, msg:
raise IOError, ('socket error', msg), sys.exc_info()[2]
def open_unknown(self, fullurl, data=None):
"""Overridable interface to open unknown URL type."""
type, url = splittype(fullurl)
raise IOError, ('url error', 'unknown url type', type)
def open_unknown_proxy(self, proxy, fullurl, data=None):
"""Overridable interface to open unknown URL type."""
type, url = splittype(fullurl)
raise IOError, ('url error', 'invalid proxy for %s' % type, proxy)
# External interface
def retrieve(self, url, filename=None, reporthook=None, data=None):
"""retrieve(url) returns (filename, headers) for a local object
or (tempfilename, headers) for a remote object."""
url = unwrap(toBytes(url))
if self.tempcache and url in self.tempcache:
return self.tempcache[url]
type, url1 = splittype(url)
if filename is None and (not type or type == 'file'):
try:
fp = self.open_local_file(url1)
hdrs = fp.info()
fp.close()
return url2pathname(splithost(url1)[1]), hdrs
except IOError:
pass
fp = self.open(url, data)
try:
headers = fp.info()
if filename:
tfp = open(filename, 'wb')
else:
import tempfile
garbage, path = splittype(url)
garbage, path = splithost(path or "")
path, garbage = splitquery(path or "")
path, garbage = splitattr(path or "")
suffix = os.path.splitext(path)[1]
(fd, filename) = tempfile.mkstemp(suffix)
self.__tempfiles.append(filename)
tfp = os.fdopen(fd, 'wb')
try:
result = filename, headers
if self.tempcache is not None:
self.tempcache[url] = result
bs = 1024*8
size = -1
read = 0
blocknum = 0
if "content-length" in headers:
size = int(headers["Content-Length"])
if reporthook:
reporthook(blocknum, bs, size)
while 1:
block = fp.read(bs)
if block == "":
break
read += len(block)
tfp.write(block)
blocknum += 1
if reporthook:
reporthook(blocknum, bs, size)
finally:
tfp.close()
finally:
fp.close()
# raise exception if actual size does not match content-length header
if size >= 0 and read < size:
raise ContentTooShortError("retrieval incomplete: got only %i out "
"of %i bytes" % (read, size), result)
return result
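# Illustrative usage of the module-level wrapper (sketch): urlretrieve()
# drives retrieve() and forwards progress to the optional reporthook,
# which is called as reporthook(blocknum, blocksize, totalsize):
#
#     def hook(blocknum, blocksize, totalsize):
#         print 'read %d blocks (total size %d)' % (blocknum, totalsize)
#     filename, headers = urlretrieve('http://www.python.org/',
#                                     reporthook=hook)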
# Each method named open_<type> knows how to open that type of URL
def open_http(self, url, data=None):
"""Use HTTP protocol."""
import httplib
user_passwd = None
proxy_passwd= None
if isinstance(url, str):
host, selector = splithost(url)
if host:
user_passwd, host = splituser(host)
host = unquote(host)
realhost = host
else:
host, selector = url
# check whether the proxy contains authorization information
proxy_passwd, host = splituser(host)
# now we proceed with the url we want to obtain
urltype, rest = splittype(selector)
url = rest
user_passwd = None
if urltype.lower() != 'http':
realhost = None
else:
realhost, rest = splithost(rest)
if realhost:
user_passwd, realhost = splituser(realhost)
if user_passwd:
selector = "%s://%s%s" % (urltype, realhost, rest)
if proxy_bypass(realhost):
host = realhost
#print "proxy via http:", host, selector
if not host: raise IOError, ('http error', 'no host given')
if proxy_passwd:
proxy_passwd = unquote(proxy_passwd)
proxy_auth = base64.b64encode(proxy_passwd).strip()
else:
proxy_auth = None
if user_passwd:
user_passwd = unquote(user_passwd)
auth = base64.b64encode(user_passwd).strip()
else:
auth = None
h = httplib.HTTP(host)
if data is not None:
h.putrequest('POST', selector)
h.putheader('Content-Type', 'application/x-www-form-urlencoded')
h.putheader('Content-Length', '%d' % len(data))
else:
h.putrequest('GET', selector)
if proxy_auth: h.putheader('Proxy-Authorization', 'Basic %s' % proxy_auth)
if auth: h.putheader('Authorization', 'Basic %s' % auth)
if realhost: h.putheader('Host', realhost)
for args in self.addheaders: h.putheader(*args)
h.endheaders(data)
errcode, errmsg, headers = h.getreply()
fp = h.getfile()
if errcode == -1:
if fp: fp.close()
# something went wrong with the HTTP status line
raise IOError, ('http protocol error', 0,
'got a bad status line', None)
# According to RFC 2616, "2xx" code indicates that the client's
# request was successfully received, understood, and accepted.
if (200 <= errcode < 300):
return addinfourl(fp, headers, "http:" + url, errcode)
else:
if data is None:
return self.http_error(url, fp, errcode, errmsg, headers)
else:
return self.http_error(url, fp, errcode, errmsg, headers, data)
def http_error(self, url, fp, errcode, errmsg, headers, data=None):
"""Handle http errors.
Derived class can override this, or provide specific handlers
named http_error_DDD where DDD is the 3-digit error code."""
# First check if there's a specific handler for this error
name = 'http_error_%d' % errcode
if hasattr(self, name):
method = getattr(self, name)
if data is None:
result = method(url, fp, errcode, errmsg, headers)
else:
result = method(url, fp, errcode, errmsg, headers, data)
if result: return result
return self.http_error_default(url, fp, errcode, errmsg, headers)
def http_error_default(self, url, fp, errcode, errmsg, headers):
"""Default error handler: close the connection and raise IOError."""
fp.close()
raise IOError, ('http error', errcode, errmsg, headers)
if _have_ssl:
def open_https(self, url, data=None):
"""Use HTTPS protocol."""
import httplib
user_passwd = None
proxy_passwd = None
if isinstance(url, str):
host, selector = splithost(url)
if host:
user_passwd, host = splituser(host)
host = unquote(host)
realhost = host
else:
host, selector = url
# here we determine whether the proxy contains authorization information
proxy_passwd, host = splituser(host)
urltype, rest = splittype(selector)
url = rest
user_passwd = None
if urltype.lower() != 'https':
realhost = None
else:
realhost, rest = splithost(rest)
if realhost:
user_passwd, realhost = splituser(realhost)
if user_passwd:
selector = "%s://%s%s" % (urltype, realhost, rest)
#print "proxy via https:", host, selector
if not host: raise IOError, ('https error', 'no host given')
if proxy_passwd:
proxy_passwd = unquote(proxy_passwd)
proxy_auth = base64.b64encode(proxy_passwd).strip()
else:
proxy_auth = None
if user_passwd:
user_passwd = unquote(user_passwd)
auth = base64.b64encode(user_passwd).strip()
else:
auth = None
h = httplib.HTTPS(host, 0,
key_file=self.key_file,
cert_file=self.cert_file,
context=self.context)
if data is not None:
h.putrequest('POST', selector)
h.putheader('Content-Type',
'application/x-www-form-urlencoded')
h.putheader('Content-Length', '%d' % len(data))
else:
h.putrequest('GET', selector)
if proxy_auth: h.putheader('Proxy-Authorization', 'Basic %s' % proxy_auth)
if auth: h.putheader('Authorization', 'Basic %s' % auth)
if realhost: h.putheader('Host', realhost)
for args in self.addheaders: h.putheader(*args)
h.endheaders(data)
errcode, errmsg, headers = h.getreply()
fp = h.getfile()
if errcode == -1:
if fp: fp.close()
# something went wrong with the HTTP status line
raise IOError, ('http protocol error', 0,
'got a bad status line', None)
# According to RFC 2616, "2xx" code indicates that the client's
# request was successfully received, understood, and accepted.
if (200 <= errcode < 300):
return addinfourl(fp, headers, "https:" + url, errcode)
else:
if data is None:
return self.http_error(url, fp, errcode, errmsg, headers)
else:
return self.http_error(url, fp, errcode, errmsg, headers,
data)
def open_file(self, url):
"""Use local file or FTP depending on form of URL."""
if not isinstance(url, str):
raise IOError, ('file error', 'proxy support for file protocol currently not implemented')
if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/':
return self.open_ftp(url)
else:
return self.open_local_file(url)
def open_local_file(self, url):
"""Use local file."""
import mimetypes, mimetools, email.utils
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
host, file = splithost(url)
localname = url2pathname(file)
try:
stats = os.stat(localname)
except OSError, e:
raise IOError(e.errno, e.strerror, e.filename)
size = stats.st_size
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
mtype = mimetypes.guess_type(url)[0]
headers = mimetools.Message(StringIO(
'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
(mtype or 'text/plain', size, modified)))
if not host:
urlfile = file
if file[:1] == '/':
urlfile = 'file://' + file
elif file[:2] == './':
raise ValueError("local file url may start with / or file:. Unknown url of type: %s" % url)
return addinfourl(open(localname, 'rb'),
headers, urlfile)
host, port = splitport(host)
if not port \
and socket.gethostbyname(host) in (localhost(), thishost()):
urlfile = file
if file[:1] == '/':
urlfile = 'file://' + file
return addinfourl(open(localname, 'rb'),
headers, urlfile)
raise IOError, ('local file error', 'not on local host')
def open_ftp(self, url):
"""Use FTP protocol."""
if not isinstance(url, str):
raise IOError, ('ftp error', 'proxy support for ftp protocol currently not implemented')
import mimetypes, mimetools
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
host, path = splithost(url)
if not host: raise IOError, ('ftp error', 'no host given')
host, port = splitport(host)
user, host = splituser(host)
if user: user, passwd = splitpasswd(user)
else: passwd = None
host = unquote(host)
user = user or ''
passwd = passwd or ''
host = socket.gethostbyname(host)
if not port:
import ftplib
port = ftplib.FTP_PORT
else:
port = int(port)
path, attrs = splitattr(path)
path = unquote(path)
dirs = path.split('/')
dirs, file = dirs[:-1], dirs[-1]
if dirs and not dirs[0]: dirs = dirs[1:]
if dirs and not dirs[0]: dirs[0] = '/'
key = user, host, port, '/'.join(dirs)
# XXX thread unsafe!
if len(self.ftpcache) > MAXFTPCACHE:
# Prune the cache, rather arbitrarily
for k in self.ftpcache.keys():
if k != key:
v = self.ftpcache[k]
del self.ftpcache[k]
v.close()
try:
if not key in self.ftpcache:
self.ftpcache[key] = \
ftpwrapper(user, passwd, host, port, dirs)
if not file: type = 'D'
else: type = 'I'
for attr in attrs:
attr, value = splitvalue(attr)
if attr.lower() == 'type' and \
value in ('a', 'A', 'i', 'I', 'd', 'D'):
type = value.upper()
(fp, retrlen) = self.ftpcache[key].retrfile(file, type)
mtype = mimetypes.guess_type("ftp:" + url)[0]
headers = ""
if mtype:
headers += "Content-Type: %s\n" % mtype
if retrlen is not None and retrlen >= 0:
headers += "Content-Length: %d\n" % retrlen
headers = mimetools.Message(StringIO(headers))
return addinfourl(fp, headers, "ftp:" + url)
except ftperrors(), msg:
raise IOError, ('ftp error', msg), sys.exc_info()[2]
def open_data(self, url, data=None):
"""Use "data" URL."""
if not isinstance(url, str):
raise IOError, ('data error', 'proxy support for data protocol currently not implemented')
# ignore POSTed data
#
# syntax of data URLs:
# dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
# mediatype := [ type "/" subtype ] *( ";" parameter )
# data := *urlchar
# parameter := attribute "=" value
import mimetools
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
[type, data] = url.split(',', 1)
except ValueError:
raise IOError, ('data error', 'bad data URL')
if not type:
type = 'text/plain;charset=US-ASCII'
semi = type.rfind(';')
if semi >= 0 and '=' not in type[semi:]:
encoding = type[semi+1:]
type = type[:semi]
else:
encoding = ''
msg = []
msg.append('Date: %s'%time.strftime('%a, %d %b %Y %H:%M:%S GMT',
time.gmtime(time.time())))
msg.append('Content-type: %s' % type)
if encoding == 'base64':
data = base64.decodestring(data)
else:
data = unquote(data)
msg.append('Content-Length: %d' % len(data))
msg.append('')
msg.append(data)
msg = '\n'.join(msg)
f = StringIO(msg)
headers = mimetools.Message(f, 0)
#f.fileno = None # needed for addinfourl
return addinfourl(f, headers, url)
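# Illustrative example (sketch): a base64 data URL decodes to its
# payload, e.g.
#
#     u = URLopener().open('data:text/plain;base64,aGVsbG8=')
#     assert u.read() == 'hello'   # 'aGVsbG8=' is base64 for 'hello'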
class FancyURLopener(URLopener):
"""Derived class with handlers for errors we can handle (perhaps)."""
def __init__(self, *args, **kwargs):
URLopener.__init__(self, *args, **kwargs)
self.auth_cache = {}
self.tries = 0
self.maxtries = 10
def http_error_default(self, url, fp, errcode, errmsg, headers):
"""Default error handling -- don't raise an exception."""
return addinfourl(fp, headers, "http:" + url, errcode)
def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 302 -- relocated (temporarily)."""
self.tries += 1
try:
if self.maxtries and self.tries >= self.maxtries:
if hasattr(self, "http_error_500"):
meth = self.http_error_500
else:
meth = self.http_error_default
return meth(url, fp, 500,
"Internal Server Error: Redirect Recursion",
headers)
result = self.redirect_internal(url, fp, errcode, errmsg,
headers, data)
return result
finally:
self.tries = 0
def redirect_internal(self, url, fp, errcode, errmsg, headers, data):
if 'location' in headers:
newurl = headers['location']
elif 'uri' in headers:
newurl = headers['uri']
else:
return
fp.close()
# In case the server sent a relative URL, join with original:
newurl = basejoin(self.type + ":" + url, newurl)
# For security reasons we do not allow redirects to protocols
# other than HTTP, HTTPS or FTP.
newurl_lower = newurl.lower()
if not (newurl_lower.startswith('http://') or
newurl_lower.startswith('https://') or
newurl_lower.startswith('ftp://')):
raise IOError('redirect error', errcode,
errmsg + " - Redirection to url '%s' is not allowed" %
newurl,
headers)
return self.open(newurl)
def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 301 -- also relocated (permanently)."""
return self.http_error_302(url, fp, errcode, errmsg, headers, data)
def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 303 -- also relocated (essentially identical to 302)."""
return self.http_error_302(url, fp, errcode, errmsg, headers, data)
def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 307 -- relocated, but turn POST into error."""
if data is None:
return self.http_error_302(url, fp, errcode, errmsg, headers, data)
else:
return self.http_error_default(url, fp, errcode, errmsg, headers)
def http_error_401(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 401 -- authentication required.
This function supports Basic authentication only."""
if not 'www-authenticate' in headers:
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
stuff = headers['www-authenticate']
import re
match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
if not match:
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
scheme, realm = match.groups()
if scheme.lower() != 'basic':
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
name = 'retry_' + self.type + '_basic_auth'
if data is None:
return getattr(self,name)(url, realm)
else:
return getattr(self,name)(url, realm, data)
def http_error_407(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 407 -- proxy authentication required.
This function supports Basic authentication only."""
if not 'proxy-authenticate' in headers:
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
stuff = headers['proxy-authenticate']
import re
match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
if not match:
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
scheme, realm = match.groups()
if scheme.lower() != 'basic':
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
name = 'retry_proxy_' + self.type + '_basic_auth'
if data is None:
return getattr(self,name)(url, realm)
else:
return getattr(self,name)(url, realm, data)
def retry_proxy_http_basic_auth(self, url, realm, data=None):
host, selector = splithost(url)
newurl = 'http://' + host + selector
proxy = self.proxies['http']
urltype, proxyhost = splittype(proxy)
proxyhost, proxyselector = splithost(proxyhost)
i = proxyhost.find('@') + 1
proxyhost = proxyhost[i:]
user, passwd = self.get_user_passwd(proxyhost, realm, i)
if not (user or passwd): return None
proxyhost = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + proxyhost
self.proxies['http'] = 'http://' + proxyhost + proxyselector
if data is None:
return self.open(newurl)
else:
return self.open(newurl, data)
def retry_proxy_https_basic_auth(self, url, realm, data=None):
host, selector = splithost(url)
newurl = 'https://' + host + selector
proxy = self.proxies['https']
urltype, proxyhost = splittype(proxy)
proxyhost, proxyselector = splithost(proxyhost)
i = proxyhost.find('@') + 1
proxyhost = proxyhost[i:]
user, passwd = self.get_user_passwd(proxyhost, realm, i)
if not (user or passwd): return None
proxyhost = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + proxyhost
self.proxies['https'] = 'https://' + proxyhost + proxyselector
if data is None:
return self.open(newurl)
else:
return self.open(newurl, data)
def retry_http_basic_auth(self, url, realm, data=None):
host, selector = splithost(url)
i = host.find('@') + 1
host = host[i:]
user, passwd = self.get_user_passwd(host, realm, i)
if not (user or passwd): return None
host = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + host
newurl = 'http://' + host + selector
if data is None:
return self.open(newurl)
else:
return self.open(newurl, data)
def retry_https_basic_auth(self, url, realm, data=None):
host, selector = splithost(url)
i = host.find('@') + 1
host = host[i:]
user, passwd = self.get_user_passwd(host, realm, i)
if not (user or passwd): return None
host = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + host
newurl = 'https://' + host + selector
if data is None:
return self.open(newurl)
else:
return self.open(newurl, data)
def get_user_passwd(self, host, realm, clear_cache=0):
key = realm + '@' + host.lower()
if key in self.auth_cache:
if clear_cache:
del self.auth_cache[key]
else:
return self.auth_cache[key]
user, passwd = self.prompt_user_passwd(host, realm)
if user or passwd: self.auth_cache[key] = (user, passwd)
return user, passwd
def prompt_user_passwd(self, host, realm):
"""Override this in a GUI environment!"""
import getpass
try:
user = raw_input("Enter username for %s at %s: " % (realm,
host))
passwd = getpass.getpass("Enter password for %s in %s at %s: " %
(user, realm, host))
return user, passwd
except KeyboardInterrupt:
print
return None, None
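# Illustrative subclass (sketch): in a non-interactive or GUI setting,
# prompt_user_passwd() can be overridden to supply credentials directly;
# the values below are hypothetical placeholders.
#
#     class AutoAuthOpener(FancyURLopener):
#         def prompt_user_passwd(self, host, realm):
#             return 'user', 'secret'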
# Utility functions
_localhost = None
def localhost():
"""Return the IP address of the magic hostname 'localhost'."""
global _localhost
if _localhost is None:
_localhost = socket.gethostbyname('localhost')
return _localhost
_thishost = None
def thishost():
"""Return the IP address of the current host."""
global _thishost
if _thishost is None:
try:
_thishost = socket.gethostbyname(socket.gethostname())
except socket.gaierror:
_thishost = socket.gethostbyname('localhost')
return _thishost
_ftperrors = None
def ftperrors():
"""Return the set of errors raised by the FTP class."""
global _ftperrors
if _ftperrors is None:
import ftplib
_ftperrors = ftplib.all_errors
return _ftperrors
_noheaders = None
def noheaders():
"""Return an empty mimetools.Message object."""
global _noheaders
if _noheaders is None:
import mimetools
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
_noheaders = mimetools.Message(StringIO(), 0)
_noheaders.fp.close() # Recycle file descriptor
return _noheaders
# Utility classes
class ftpwrapper:
"""Class used by open_ftp() for cache of open FTP connections."""
def __init__(self, user, passwd, host, port, dirs,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
persistent=True):
self.user = user
self.passwd = passwd
self.host = host
self.port = port
self.dirs = dirs
self.timeout = timeout
self.refcount = 0
self.keepalive = persistent
try:
self.init()
except:
self.close()
raise
def init(self):
import ftplib
self.busy = 0
self.ftp = ftplib.FTP()
self.ftp.connect(self.host, self.port, self.timeout)
self.ftp.login(self.user, self.passwd)
_target = '/'.join(self.dirs)
self.ftp.cwd(_target)
def retrfile(self, file, type):
import ftplib
self.endtransfer()
if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
else: cmd = 'TYPE ' + type; isdir = 0
try:
self.ftp.voidcmd(cmd)
except ftplib.all_errors:
self.init()
self.ftp.voidcmd(cmd)
conn = None
if file and not isdir:
# Try to retrieve as a file
try:
cmd = 'RETR ' + file
conn, retrlen = self.ftp.ntransfercmd(cmd)
except ftplib.error_perm, reason:
if str(reason)[:3] != '550':
raise IOError, ('ftp error', reason), sys.exc_info()[2]
if not conn:
# Set transfer mode to ASCII!
self.ftp.voidcmd('TYPE A')
# Try a directory listing. Verify that directory exists.
if file:
pwd = self.ftp.pwd()
try:
try:
self.ftp.cwd(file)
except ftplib.error_perm, reason:
raise IOError, ('ftp error', reason), sys.exc_info()[2]
finally:
self.ftp.cwd(pwd)
cmd = 'LIST ' + file
else:
cmd = 'LIST'
conn, retrlen = self.ftp.ntransfercmd(cmd)
self.busy = 1
ftpobj = addclosehook(conn.makefile('rb'), self.file_close)
self.refcount += 1
conn.close()
# Pass back both a suitably decorated object and a retrieval length
return (ftpobj, retrlen)
def endtransfer(self):
self.busy = 0
def close(self):
self.keepalive = False
if self.refcount <= 0:
self.real_close()
def file_close(self):
self.endtransfer()
self.refcount -= 1
if self.refcount <= 0 and not self.keepalive:
self.real_close()
def real_close(self):
self.endtransfer()
try:
self.ftp.close()
except ftperrors():
pass
class addbase:
"""Base class for addinfo and addclosehook."""
def __init__(self, fp):
self.fp = fp
self.read = self.fp.read
self.readline = self.fp.readline
if hasattr(self.fp, "readlines"): self.readlines = self.fp.readlines
if hasattr(self.fp, "fileno"):
self.fileno = self.fp.fileno
else:
self.fileno = lambda: None
if hasattr(self.fp, "__iter__"):
self.__iter__ = self.fp.__iter__
if hasattr(self.fp, "next"):
self.next = self.fp.next
def __repr__(self):
return '<%s at %r whose fp = %r>' % (self.__class__.__name__,
id(self), self.fp)
def close(self):
self.read = None
self.readline = None
self.readlines = None
self.fileno = None
if self.fp: self.fp.close()
self.fp = None
class addclosehook(addbase):
"""Class to add a close hook to an open file."""
def __init__(self, fp, closehook, *hookargs):
addbase.__init__(self, fp)
self.closehook = closehook
self.hookargs = hookargs
def close(self):
try:
closehook = self.closehook
hookargs = self.hookargs
if closehook:
self.closehook = None
self.hookargs = None
closehook(*hookargs)
finally:
addbase.close(self)
class addinfo(addbase):
"""class to add an info() method to an open file."""
def __init__(self, fp, headers):
addbase.__init__(self, fp)
self.headers = headers
def info(self):
return self.headers
class addinfourl(addbase):
"""class to add info() and geturl() methods to an open file."""
def __init__(self, fp, headers, url, code=None):
addbase.__init__(self, fp)
self.headers = headers
self.url = url
self.code = code
def info(self):
return self.headers
def getcode(self):
return self.code
def geturl(self):
return self.url
# Utilities to parse URLs (most of these return None for missing parts):
# unwrap('<URL:type://host/path>') --> 'type://host/path'
# splittype('type:opaquestring') --> 'type', 'opaquestring'
# splithost('//host[:port]/path') --> 'host[:port]', '/path'
# splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'
# splitpasswd('user:passwd') -> 'user', 'passwd'
# splitport('host:port') --> 'host', 'port'
# splitquery('/path?query') --> '/path', 'query'
# splittag('/path#tag') --> '/path', 'tag'
# splitattr('/path;attr1=value1;attr2=value2;...') ->
# '/path', ['attr1=value1', 'attr2=value2', ...]
# splitvalue('attr=value') --> 'attr', 'value'
# unquote('abc%20def') -> 'abc def'
# quote('abc def') -> 'abc%20def'
try:
unicode
except NameError:
def _is_unicode(x):
return 0
else:
def _is_unicode(x):
return isinstance(x, unicode)
def toBytes(url):
"""toBytes(u"URL") --> 'URL'."""
# Most URL schemes require ASCII. If that changes, the conversion
# can be relaxed
if _is_unicode(url):
try:
url = url.encode("ASCII")
except UnicodeError:
raise UnicodeError("URL " + repr(url) +
" contains non-ASCII characters")
return url
def unwrap(url):
"""unwrap('<URL:type://host/path>') --> 'type://host/path'."""
url = url.strip()
if url[:1] == '<' and url[-1:] == '>':
url = url[1:-1].strip()
if url[:4] == 'URL:': url = url[4:].strip()
return url
_typeprog = None
def splittype(url):
"""splittype('type:opaquestring') --> 'type', 'opaquestring'."""
global _typeprog
if _typeprog is None:
import re
_typeprog = re.compile('^([^/:]+):')
match = _typeprog.match(url)
if match:
scheme = match.group(1)
return scheme.lower(), url[len(scheme) + 1:]
return None, url
_hostprog = None
def splithost(url):
"""splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
global _hostprog
if _hostprog is None:
import re
_hostprog = re.compile('^//([^/?]*)(.*)$')
match = _hostprog.match(url)
if match:
host_port = match.group(1)
path = match.group(2)
if path and not path.startswith('/'):
path = '/' + path
return host_port, path
return None, url
_userprog = None
def splituser(host):
"""splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
global _userprog
if _userprog is None:
import re
_userprog = re.compile('^(.*)@(.*)$')
match = _userprog.match(host)
if match: return match.group(1, 2)
return None, host
_passwdprog = None
def splitpasswd(user):
"""splitpasswd('user:passwd') -> 'user', 'passwd'."""
global _passwdprog
if _passwdprog is None:
import re
_passwdprog = re.compile('^([^:]*):(.*)$',re.S)
match = _passwdprog.match(user)
if match: return match.group(1, 2)
return user, None
# splittag('/path#tag') --> '/path', 'tag'
_portprog = None
def splitport(host):
"""splitport('host:port') --> 'host', 'port'."""
global _portprog
if _portprog is None:
import re
_portprog = re.compile('^(.*):([0-9]*)$')
match = _portprog.match(host)
if match:
host, port = match.groups()
if port:
return host, port
return host, None
_nportprog = None
def splitnport(host, defport=-1):
"""Split host and port, returning numeric port.
Return given default port if no ':' found; defaults to -1.
Return numerical port if a valid number is found after ':'.
Return None if ':' but not a valid number."""
global _nportprog
if _nportprog is None:
import re
_nportprog = re.compile('^(.*):(.*)$')
match = _nportprog.match(host)
if match:
host, port = match.group(1, 2)
if port:
try:
nport = int(port)
except ValueError:
nport = None
return host, nport
return host, defport
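# The three outcomes described in the docstring, illustrated (comments only):
#
#     >>> splitnport('example.com:8080')
#     ('example.com', 8080)
#     >>> splitnport('example.com')
#     ('example.com', -1)
#     >>> splitnport('example.com:port')
#     ('example.com', None)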
_queryprog = None
def splitquery(url):
"""splitquery('/path?query') --> '/path', 'query'."""
global _queryprog
if _queryprog is None:
import re
_queryprog = re.compile('^(.*)\?([^?]*)$')
match = _queryprog.match(url)
if match: return match.group(1, 2)
return url, None
_tagprog = None
def splittag(url):
"""splittag('/path#tag') --> '/path', 'tag'."""
global _tagprog
if _tagprog is None:
import re
_tagprog = re.compile('^(.*)#([^#]*)$')
match = _tagprog.match(url)
if match: return match.group(1, 2)
return url, None
def splitattr(url):
"""splitattr('/path;attr1=value1;attr2=value2;...') ->
'/path', ['attr1=value1', 'attr2=value2', ...]."""
words = url.split(';')
return words[0], words[1:]
_valueprog = None
def splitvalue(attr):
"""splitvalue('attr=value') --> 'attr', 'value'."""
global _valueprog
if _valueprog is None:
import re
_valueprog = re.compile('^([^=]*)=(.*)$')
match = _valueprog.match(attr)
if match: return match.group(1, 2)
return attr, None
# urlparse contains a duplicate of this method to avoid a circular import. If
# you update this method, also update the copy in urlparse. This code
# duplication does not exist in Python3.
_hexdig = '0123456789ABCDEFabcdef'
_hextochr = dict((a + b, chr(int(a + b, 16)))
for a in _hexdig for b in _hexdig)
_asciire = re.compile('([\x00-\x7f]+)')
def unquote(s):
"""unquote('abc%20def') -> 'abc def'."""
if _is_unicode(s):
if '%' not in s:
return s
bits = _asciire.split(s)
res = [bits[0]]
append = res.append
for i in range(1, len(bits), 2):
append(unquote(str(bits[i])).decode('latin1'))
append(bits[i + 1])
return ''.join(res)
bits = s.split('%')
# fastpath
if len(bits) == 1:
return s
res = [bits[0]]
append = res.append
for item in bits[1:]:
try:
append(_hextochr[item[:2]])
append(item[2:])
except KeyError:
append('%')
append(item)
return ''.join(res)
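# Note the KeyError fallback above: a '%' not followed by two hex digits is
# passed through literally instead of raising. Illustrative example:
#
#     >>> unquote('abc%20def%zz')
#     'abc def%zz'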
def unquote_plus(s):
"""unquote('%7e/abc+def') -> '~/abc def'"""
s = s.replace('+', ' ')
return unquote(s)
always_safe = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'abcdefghijklmnopqrstuvwxyz'
'0123456789' '_.-')
_safe_map = {}
for i, c in zip(xrange(256), str(bytearray(xrange(256)))):
_safe_map[c] = c if (i < 128 and c in always_safe) else '%{:02X}'.format(i)
_safe_quoters = {}
def quote(s, safe='/'):
"""quote('abc def') -> 'abc%20def'
Each part of a URL, e.g. the path info, the query, etc., has a
different set of reserved characters that must be quoted.
RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists
the following reserved characters.
reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" |
"$" | ","
Each of these characters is reserved in some component of a URL,
but not necessarily in all of them.
By default, the quote function is intended for quoting the path
section of a URL. Thus, it will not encode '/'. This character
is reserved, but in typical usage the quote function is being
called on a path where the existing slash characters are used as
reserved characters.
"""
# fastpath
if not s:
if s is None:
raise TypeError('None object cannot be quoted')
return s
cachekey = (safe, always_safe)
try:
(quoter, safe) = _safe_quoters[cachekey]
except KeyError:
safe_map = _safe_map.copy()
safe_map.update([(c, c) for c in safe])
quoter = safe_map.__getitem__
safe = always_safe + safe
_safe_quoters[cachekey] = (quoter, safe)
if not s.rstrip(safe):
return s
return ''.join(map(quoter, s))
def quote_plus(s, safe=''):
"""Quote the query fragment of a URL; replacing ' ' with '+'"""
if ' ' in s:
s = quote(s, safe + ' ')
return s.replace(' ', '+')
return quote(s, safe)
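# Illustrative behaviour of quote() and quote_plus() (comments only):
#
#     >>> quote('/path with spaces/x')   # '/' is safe by default, ' ' is not
#     '/path%20with%20spaces/x'
#     >>> quote_plus('a b&c')            # spaces become '+', '&' is escaped
#     'a+b%26c'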
def urlencode(query, doseq=0):
"""Encode a sequence of two-element tuples or dictionary into a URL query string.
If any values in the query arg are sequences and doseq is true, each
sequence element is converted to a separate parameter.
If the query arg is a sequence of two-element tuples, the order of the
parameters in the output will match the order of parameters in the
input.
"""
if hasattr(query,"items"):
# mapping objects
query = query.items()
else:
# it's a bother at times that strings and string-like objects are
# sequences...
try:
# non-sequence items should not work with len()
# non-empty strings will fail this
if len(query) and not isinstance(query[0], tuple):
raise TypeError
# zero-length sequences of all types will get here and succeed,
# but that's a minor nit - since the original implementation
# allowed empty dicts that type of behavior probably should be
# preserved for consistency
except TypeError:
ty,va,tb = sys.exc_info()
raise TypeError, "not a valid non-string sequence or mapping object", tb
l = []
if not doseq:
# preserve old behavior
for k, v in query:
k = quote_plus(str(k))
v = quote_plus(str(v))
l.append(k + '=' + v)
else:
for k, v in query:
k = quote_plus(str(k))
if isinstance(v, str):
v = quote_plus(v)
l.append(k + '=' + v)
elif _is_unicode(v):
# is there a reasonable way to convert to ASCII?
# encode generates a string, but "replace" or "ignore"
# lose information and "strict" can raise UnicodeError
v = quote_plus(v.encode("ASCII","replace"))
l.append(k + '=' + v)
else:
try:
# is this a sufficient test for sequence-ness?
len(v)
except TypeError:
# not a sequence
v = quote_plus(str(v))
l.append(k + '=' + v)
else:
# loop over the sequence
for elt in v:
l.append(k + '=' + quote_plus(str(elt)))
return '&'.join(l)
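# A small illustration of the doseq behaviour documented above (comment only;
# the ordering is deterministic because the input is a list of tuples):
#
#     >>> urlencode([('q', 'a b'), ('tag', ['x', 'y'])], doseq=1)
#     'q=a+b&tag=x&tag=y'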
# Proxy handling
def getproxies_environment():
"""Return a dictionary of scheme -> proxy server URL mappings.
Scan the environment for variables named <scheme>_proxy;
this seems to be the standard convention. To prefer lowercase
variables, the environment is processed in two passes: the first pass
matches names in any case, and the second matches only lowercase names.
If you need a different way, you can pass a proxies dictionary to the
[Fancy]URLopener constructor.
"""
proxies = {}
for name, value in os.environ.items():
name = name.lower()
if value and name[-6:] == '_proxy':
proxies[name[:-6]] = value
for name, value in os.environ.items():
if name[-6:] == '_proxy':
name = name.lower()
if value:
proxies[name[:-6]] = value
else:
proxies.pop(name[:-6], None)
return proxies
def proxy_bypass_environment(host, proxies=None):
"""Test if proxies should not be used for a particular host.
Checks the proxies dict for the value of no_proxy, which should be a
list of comma separated DNS suffixes, or '*' for all hosts.
"""
if proxies is None:
proxies = getproxies_environment()
# don't bypass if no_proxy isn't specified
try:
no_proxy = proxies['no']
except KeyError:
return 0
# '*' is special case for always bypass
if no_proxy == '*':
return 1
# strip port off host
hostonly, port = splitport(host)
# check if the host ends with any of the DNS suffixes
no_proxy_list = [proxy.strip() for proxy in no_proxy.split(',')]
for name in no_proxy_list:
if name:
name = re.escape(name)
pattern = r'(.+\.)?%s$' % name
if (re.match(pattern, hostonly, re.I)
or re.match(pattern, host, re.I)):
return 1
# otherwise, don't bypass
return 0
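# For example (illustrative values), with proxies == {'no': 'example.com'}:
#
#     proxy_bypass_environment('www.example.com', proxies)  # --> 1
#     proxy_bypass_environment('www.other.org', proxies)    # --> 0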
if sys.platform == 'darwin':
from _scproxy import _get_proxy_settings, _get_proxies
def proxy_bypass_macosx_sysconf(host):
"""
Return True iff this host shouldn't be accessed using a proxy
This function uses the MacOSX framework SystemConfiguration
to fetch the proxy information.
"""
import re
import socket
from fnmatch import fnmatch
hostonly, port = splitport(host)
def ip2num(ipAddr):
parts = ipAddr.split('.')
parts = map(int, parts)
if len(parts) != 4:
parts = (parts + [0, 0, 0, 0])[:4]
return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3]
proxy_settings = _get_proxy_settings()
# Check for simple host names:
if '.' not in host:
if proxy_settings['exclude_simple']:
return True
hostIP = None
for value in proxy_settings.get('exceptions', ()):
# Items in the list are strings like these: *.local, 169.254/16
if not value: continue
m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
if m is not None:
if hostIP is None:
try:
hostIP = socket.gethostbyname(hostonly)
hostIP = ip2num(hostIP)
except socket.error:
continue
base = ip2num(m.group(1))
mask = m.group(2)
if mask is None:
mask = 8 * (m.group(1).count('.') + 1)
else:
mask = int(mask[1:])
mask = 32 - mask
if (hostIP >> mask) == (base >> mask):
return True
elif fnmatch(host, value):
return True
return False
def getproxies_macosx_sysconf():
"""Return a dictionary of scheme -> proxy server URL mappings.
This function uses the MacOSX framework SystemConfiguration
to fetch the proxy information.
"""
return _get_proxies()
def proxy_bypass(host):
"""Return True, if a host should be bypassed.
Checks proxy settings gathered from the environment, if specified, or
from the MacOSX framework SystemConfiguration.
"""
proxies = getproxies_environment()
if proxies:
return proxy_bypass_environment(host, proxies)
else:
return proxy_bypass_macosx_sysconf(host)
def getproxies():
return getproxies_environment() or getproxies_macosx_sysconf()
elif os.name == 'nt':
def getproxies_registry():
"""Return a dictionary of scheme -> proxy server URL mappings.
Win32 uses the registry to store proxies.
"""
proxies = {}
try:
import _winreg
except ImportError:
# Std module, so should be around - but you never know!
return proxies
try:
internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
proxyEnable = _winreg.QueryValueEx(internetSettings,
'ProxyEnable')[0]
if proxyEnable:
# Returned as Unicode, but causes problems if not converted to ASCII
proxyServer = str(_winreg.QueryValueEx(internetSettings,
'ProxyServer')[0])
if '=' in proxyServer:
# Per-protocol settings
for p in proxyServer.split(';'):
protocol, address = p.split('=', 1)
# See if address has a type:// prefix
import re
if not re.match('^([^/:]+)://', address):
address = '%s://%s' % (protocol, address)
proxies[protocol] = address
else:
# Use one setting for all protocols
if proxyServer[:5] == 'http:':
proxies['http'] = proxyServer
else:
proxies['http'] = 'http://%s' % proxyServer
proxies['https'] = 'https://%s' % proxyServer
proxies['ftp'] = 'ftp://%s' % proxyServer
internetSettings.Close()
except (WindowsError, ValueError, TypeError):
# Either registry key not found etc, or the value in an
# unexpected format.
# proxies already set up to be empty so nothing to do
pass
return proxies
def getproxies():
"""Return a dictionary of scheme -> proxy server URL mappings.
Returns settings gathered from the environment, if specified,
or the registry.
"""
return getproxies_environment() or getproxies_registry()
def proxy_bypass_registry(host):
try:
import _winreg
import re
except ImportError:
# Std modules, so should be around - but you never know!
return 0
try:
internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
proxyEnable = _winreg.QueryValueEx(internetSettings,
'ProxyEnable')[0]
proxyOverride = str(_winreg.QueryValueEx(internetSettings,
'ProxyOverride')[0])
# ^^^^ Returned as Unicode, but causes problems if not converted to ASCII
except WindowsError:
return 0
if not proxyEnable or not proxyOverride:
return 0
# try to make a host list from name and IP address.
rawHost, port = splitport(host)
host = [rawHost]
try:
addr = socket.gethostbyname(rawHost)
if addr != rawHost:
host.append(addr)
except socket.error:
pass
try:
fqdn = socket.getfqdn(rawHost)
if fqdn != rawHost:
host.append(fqdn)
except socket.error:
pass
# make a check value list from the registry entry: replace the
# '<local>' string by the localhost entry and the corresponding
# canonical entry.
proxyOverride = proxyOverride.split(';')
# now check if we match one of the registry values.
for test in proxyOverride:
if test == '<local>':
if '.' not in rawHost:
return 1
test = test.replace(".", r"\.") # mask dots
test = test.replace("*", r".*") # change glob sequence
test = test.replace("?", r".") # change glob char
for val in host:
# print "%s <--> %s" %( test, val )
if re.match(test, val, re.I):
return 1
return 0
def proxy_bypass(host):
"""Return True, if the host should be bypassed.
Checks proxy settings gathered from the environment, if specified,
or the registry.
"""
proxies = getproxies_environment()
if proxies:
return proxy_bypass_environment(host, proxies)
else:
return proxy_bypass_registry(host)
else:
# By default use environment variables
getproxies = getproxies_environment
proxy_bypass = proxy_bypass_environment
# Test and time quote() and unquote()
def test1():
s = ''
for i in range(256): s = s + chr(i)
s = s*4
t0 = time.time()
qs = quote(s)
uqs = unquote(qs)
t1 = time.time()
if uqs != s:
print 'Wrong!'
print repr(s)
print repr(qs)
print repr(uqs)
print round(t1 - t0, 3), 'sec'
def reporthook(blocknum, blocksize, totalsize):
# Report during remote transfers
print "Block number: %d, Block size: %d, Total size: %d" % (
blocknum, blocksize, totalsize)
|
ByteInternet/libcloud | refs/heads/byte | libcloud/compute/drivers/ikoula.py | 64 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from libcloud.compute.providers import Provider
from libcloud.compute.drivers.cloudstack import CloudStackNodeDriver
__all__ = [
'IkoulaNodeDriver'
]
class IkoulaNodeDriver(CloudStackNodeDriver):
type = Provider.IKOULA
name = 'Ikoula'
website = 'http://express.ikoula.co.uk/cloudstack'
# API endpoint info
host = 'cloudstack.ikoula.com'
path = '/client/api'
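# A minimal usage sketch (not part of the driver; the key/secret values are
# placeholders, and list_nodes() is inherited from the CloudStack base class):
#
#     from libcloud.compute.providers import get_driver
#     cls = get_driver(Provider.IKOULA)
#     driver = cls(key='my-api-key', secret='my-secret-key')
#     nodes = driver.list_nodes()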
|
alanswanson/webserver | refs/heads/master | qa/069-PathInfo3.py | 8 | from base import *
PATH_INFO = "/param1/param2/param3"
CONF = """
vserver!1!rule!690!match = directory
vserver!1!rule!690!match!directory = /pathinfo3
vserver!1!rule!690!handler = cgi
"""
class Test (TestBase):
def __init__ (self):
TestBase.__init__ (self, __file__)
self.name = "PathInfo, cgi"
self.request = "GET /pathinfo3/test%s HTTP/1.0\r\n" %(PATH_INFO)
self.conf = CONF
self.expected_error = 200
self.expected_content = "PathInfo is: "+PATH_INFO
def Prepare (self, www):
self.Mkdir (www, "pathinfo3")
self.WriteFile (www, "pathinfo3/test", 0555,
"""#!/bin/sh
echo "Content-type: text/html"
echo ""
echo "PathInfo is: $PATH_INFO"
""")
|
darshanapdas/staging-next | refs/heads/master | tools/perf/util/setup.py | 989 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
self.build_lib = build_lib
self.build_temp = build_tmp
class install_lib(_install_lib):
def finalize_options(self):
_install_lib.finalize_options(self)
self.build_dir = build_lib
cflags = getenv('CFLAGS', '').split()
# switch off several checks (need to be at the end of cflags list)
cflags += ['-fno-strict-aliasing', '-Wno-write-strings', '-Wno-unused-parameter' ]
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
libtraceevent = getenv('LIBTRACEEVENT')
libapikfs = getenv('LIBAPIKFS')
ext_sources = [f.strip() for f in file('util/python-ext-sources')
if len(f.strip()) > 0 and f[0] != '#']
perf = Extension('perf',
sources = ext_sources,
include_dirs = ['util/include'],
extra_compile_args = cflags,
extra_objects = [libtraceevent, libapikfs],
)
setup(name='perf',
version='0.1',
description='Interface with the Linux profiling infrastructure',
author='Arnaldo Carvalho de Melo',
author_email='acme@redhat.com',
license='GPLv2',
url='http://perf.wiki.kernel.org',
ext_modules=[perf],
cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
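# This script is normally driven by the perf build system, which exports the
# environment variables read above. A rough manual invocation might look like
# this (all paths are illustrative placeholders):
#
#     PYTHON_EXTBUILD_LIB=/tmp/ext/lib PYTHON_EXTBUILD_TMP=/tmp/ext/tmp \
#     LIBTRACEEVENT=/path/libtraceevent.a LIBAPIKFS=/path/libapikfs.a \
#     python2 util/setup.py build_ext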
|
CSC-ORG/Dynamic-Dashboard-2015 | refs/heads/master | engine/lib/python2.7/site-packages/rest_framework/utils/encoders.py | 5 | """
Helper classes for encoders.
"""
from __future__ import unicode_literals
from django.db.models.query import QuerySet
from django.utils import six, timezone
from django.utils.encoding import force_text
from django.utils.functional import Promise
from rest_framework.compat import OrderedDict, total_seconds
from rest_framework.utils.serializer_helpers import ReturnDict, ReturnList
import datetime
import decimal
import types
import json
import uuid
class JSONEncoder(json.JSONEncoder):
"""
JSONEncoder subclass that knows how to encode date/time/timedelta,
decimal types, generators and other basic python objects.
"""
def default(self, obj):
# For Date Time string spec, see ECMA 262
# http://ecma-international.org/ecma-262/5.1/#sec-15.9.1.15
if isinstance(obj, Promise):
return force_text(obj)
elif isinstance(obj, datetime.datetime):
representation = obj.isoformat()
if obj.microsecond:
representation = representation[:23] + representation[26:]
if representation.endswith('+00:00'):
representation = representation[:-6] + 'Z'
return representation
elif isinstance(obj, datetime.date):
return obj.isoformat()
elif isinstance(obj, datetime.time):
if timezone and timezone.is_aware(obj):
raise ValueError("JSON can't represent timezone-aware times.")
representation = obj.isoformat()
if obj.microsecond:
representation = representation[:12]
return representation
elif isinstance(obj, datetime.timedelta):
return six.text_type(total_seconds(obj))
elif isinstance(obj, decimal.Decimal):
# Serializers will coerce decimals to strings by default.
return float(obj)
elif isinstance(obj, uuid.UUID):
return six.text_type(obj)
elif isinstance(obj, QuerySet):
return tuple(obj)
elif hasattr(obj, 'tolist'):
# Numpy arrays and array scalars.
return obj.tolist()
elif hasattr(obj, '__getitem__'):
try:
return dict(obj)
except:
pass
elif hasattr(obj, '__iter__'):
return tuple(item for item in obj)
return super(JSONEncoder, self).default(obj)
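# A small illustration of the encoder in use (comments only; the outputs
# follow from the datetime.date and decimal.Decimal branches above):
#
#     >>> json.dumps(datetime.date(2015, 1, 2), cls=JSONEncoder)
#     '"2015-01-02"'
#     >>> json.dumps(decimal.Decimal('1.5'), cls=JSONEncoder)
#     '1.5'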
try:
import yaml
except ImportError:
SafeDumper = None
else:
# Adapted from http://pyyaml.org/attachment/ticket/161/use_ordered_dict.py
class SafeDumper(yaml.SafeDumper):
"""
Handles decimals as strings.
Handles OrderedDicts as usual dicts, but preserves field order, rather
than the usual behaviour of sorting the keys.
"""
def represent_decimal(self, data):
return self.represent_scalar('tag:yaml.org,2002:str', six.text_type(data))
def represent_mapping(self, tag, mapping, flow_style=None):
value = []
node = yaml.MappingNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
best_style = True
if hasattr(mapping, 'items'):
mapping = list(mapping.items())
if not isinstance(mapping, OrderedDict):
mapping.sort()
for item_key, item_value in mapping:
node_key = self.represent_data(item_key)
node_value = self.represent_data(item_value)
if not (isinstance(node_key, yaml.ScalarNode) and not node_key.style):
best_style = False
if not (isinstance(node_value, yaml.ScalarNode) and not node_value.style):
best_style = False
value.append((node_key, node_value))
if flow_style is None:
if self.default_flow_style is not None:
node.flow_style = self.default_flow_style
else:
node.flow_style = best_style
return node
SafeDumper.add_representer(
decimal.Decimal,
SafeDumper.represent_decimal
)
SafeDumper.add_representer(
OrderedDict,
yaml.representer.SafeRepresenter.represent_dict
)
SafeDumper.add_representer(
ReturnDict,
yaml.representer.SafeRepresenter.represent_dict
)
SafeDumper.add_representer(
ReturnList,
yaml.representer.SafeRepresenter.represent_list
)
SafeDumper.add_representer(
types.GeneratorType,
yaml.representer.SafeRepresenter.represent_list
)
|
Rosy-S/twilio-python | refs/heads/master | twilio/rest/resources/applications.py | 48 | from . import InstanceResource, ListResource
class Application(InstanceResource):
""" An application resource """
def update(self, **kwargs):
"""
Update this application
"""
return self.parent.update(self.name, **kwargs)
def delete(self):
"""
Delete this application
"""
return self.parent.delete(self.name)
class Applications(ListResource):
name = "Applications"
instance = Application
def list(self, **kwargs):
"""
Returns a page of :class:`Application` resources as a list. For paging
information see :class:`ListResource`
:param date friendly_name: List applications with this friendly name
"""
return self.get_instances(kwargs)
def create(self, **kwargs):
"""
Create an :class:`Application` with any of these optional parameters.
:param friendly_name: A human readable description of the application,
with maximum length 64 characters.
:param api_version: Requests to this application's URLs will start a
new TwiML session with this API version.
Either 2010-04-01 or 2008-08-01.
:param voice_url: The URL that Twilio should request when somebody
dials a phone number assigned to this application.
:param voice_method: The HTTP method that should be used to request the
VoiceUrl. Either GET or POST.
:param voice_fallback_url: A URL that Twilio will request if an error
occurs requesting or executing the TwiML
defined by VoiceUrl.
:param voice_fallback_method: The HTTP method that should be used to
request the VoiceFallbackUrl. Either GET
or POST.
:param status_callback: The URL that Twilio will request to pass status
parameters (such as call ended) to your
application.
:param status_callback_method: The HTTP method Twilio will use to make
requests to the StatusCallback URL.
Either GET or POST.
:param voice_caller_id_lookup: Do a lookup of a caller's name from the
CNAM database and post it to your app.
Either true or false.
:param sms_url: The URL that Twilio should request when somebody sends
an SMS to a phone number assigned to this application.
:param sms_method: The HTTP method that should be used to request the
SmsUrl. Either GET or POST.
:param sms_fallback_url: A URL that Twilio will request if an error
occurs requesting or executing the TwiML
defined by SmsUrl.
:param sms_fallback_method: The HTTP method that should be used to
request the SmsFallbackUrl. Either GET
or POST.
:param sms_status_callback: Twilio will make a POST request to this URL
to pass status parameters (such as sent or
failed) to your application if you specify
this application's Sid as the
ApplicationSid on an outgoing SMS request.
"""
return self.create_instance(kwargs)
def update(self, sid, **kwargs):
"""
Update an :class:`Application` with the given parameters.
All the parameters are described above in :meth:`create`
"""
return self.update_instance(sid, kwargs)
def delete(self, sid):
"""
Delete an :class:`Application`
"""
return self.delete_instance(sid)
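# A minimal usage sketch (illustrative only: `client` stands for a configured
# TwilioRestClient and `app_sid` for a real Application sid):
#
#     apps = client.applications
#     app = apps.create(friendly_name='My App',
#                       voice_url='http://example.com/voice')
#     apps.update(app_sid, sms_url='http://example.com/sms')
#     apps.delete(app_sid)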
|
tafaRU/odoo | refs/heads/8.0 | addons/l10n_fr_hr_payroll/l10n_fr_hr_payroll.py | 340 | #-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP SA (<http://openerp.com>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
class res_company(osv.osv):
_inherit = 'res.company'
_columns = {
'plafond_secu': fields.float('Plafond de la Securite Sociale', digits_compute=dp.get_precision('Payroll')),
'nombre_employes': fields.integer('Nombre d\'employes'),
'cotisation_prevoyance': fields.float('Cotisation Patronale Prevoyance', digits_compute=dp.get_precision('Payroll')),
'org_ss': fields.char('Organisme de securite sociale'),
'conv_coll': fields.char('Convention collective'),
}
class hr_contract(osv.osv):
_inherit = 'hr.contract'
_columns = {
'qualif': fields.char('Qualification'),
'niveau': fields.char('Niveau'),
'coef': fields.char('Coefficient'),
}
class hr_payslip(osv.osv):
_inherit = 'hr.payslip'
_columns = {
'payment_mode': fields.char('Mode de paiement'),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
gangadhar-kadam/verve-erp | refs/heads/v5.0 | erpnext/accounts/report/gross_profit/gross_profit.py | 3 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _, scrub
from frappe.utils import flt, cstr, cint
def execute(filters=None):
if not filters: filters = {}
gross_profit_data = GrossProfitGenerator(filters)
data = []
source = gross_profit_data.grouped_data if filters.get("group_by") != "Invoice" else gross_profit_data.data
group_wise_columns = frappe._dict({
"invoice": ["name", "posting_date", "posting_time", "item_code", "item_name", "brand", "description", \
"warehouse", "qty", "base_rate", "buying_rate", "base_amount",
"buying_amount", "gross_profit", "gross_profit_percent", "project"],
"item_code": ["item_code", "item_name", "brand", "description", "warehouse", "qty", "base_rate",
"buying_rate", "base_amount", "buying_amount", "gross_profit", "gross_profit_percent"],
"warehouse": ["warehouse", "qty", "base_rate", "buying_rate", "base_amount", "buying_amount",
"gross_profit", "gross_profit_percent"],
"territory": ["territory", "qty", "base_rate", "buying_rate", "base_amount", "buying_amount",
"gross_profit", "gross_profit_percent"],
"brand": ["brand", "qty", "base_rate", "buying_rate", "base_amount", "buying_amount",
"gross_profit", "gross_profit_percent"],
"item_group": ["item_group", "qty", "base_rate", "buying_rate", "base_amount", "buying_amount",
"gross_profit", "gross_profit_percent"],
"customer": ["customer", "customer_group", "qty", "base_rate", "buying_rate", "base_amount", "buying_amount",
"gross_profit", "gross_profit_percent"],
"customer_group": ["customer_group", "qty", "base_rate", "buying_rate", "base_amount", "buying_amount",
"gross_profit", "gross_profit_percent"],
"sales_person": ["sales_person", "allocated_amount", "qty", "base_rate", "buying_rate", "base_amount", "buying_amount",
"gross_profit", "gross_profit_percent"],
"project": ["project", "base_amount", "buying_amount", "gross_profit", "gross_profit_percent"],
"territory": ["territory", "base_amount", "buying_amount", "gross_profit", "gross_profit_percent"]
})
columns = get_columns(group_wise_columns, filters)
for src in source:
row = []
for col in group_wise_columns.get(scrub(filters.group_by)):
row.append(src.get(col))
data.append(row)
return columns, data
def get_columns(group_wise_columns, filters):
columns = []
column_map = frappe._dict({
"name": _("Sales Invoice") + "::120",
"posting_date": _("Posting Date") + ":Date",
"posting_time": _("Posting Time"),
"item_code": _("Item Code") + ":Link/Item",
"item_name": _("Item Name"),
"item_group": _("Item Group") + ":Link/Item",
"brand": _("Brand"),
"description": _("Description"),
"warehouse": _("Warehouse") + ":Link/Warehouse",
"qty": _("Qty") + ":Float",
"base_rate": _("Avg. Selling Rate") + ":Currency",
"buying_rate": _("Avg. Buying Rate") + ":Currency",
"base_amount": _("Selling Amount") + ":Currency",
"buying_amount": _("Buying Amount") + ":Currency",
"gross_profit": _("Gross Profit") + ":Currency",
"gross_profit_percent": _("Gross Profit %") + ":Percent",
"project": _("Project") + ":Link/Project",
"sales_person": _("Sales person"),
"allocated_amount": _("Allocated Amount") + ":Currency",
"customer": _("Customer") + ":Link/Customer",
"customer_group": _("Customer Group") + ":Link/Customer Group",
"territory": _("Territory") + ":Link/Territory"
})
for col in group_wise_columns.get(scrub(filters.group_by)):
columns.append(column_map.get(col))
return columns
class GrossProfitGenerator(object):
def __init__(self, filters=None):
self.data = []
self.filters = frappe._dict(filters)
self.load_invoice_items()
self.load_stock_ledger_entries()
self.load_sales_bom()
self.load_non_stock_items()
self.process()
def process(self):
self.grouped = {}
for row in self.si_list:
if self.skip_row(row, self.sales_boms):
continue
row.selling_amount = flt(row.base_amount)
sales_boms = self.sales_boms.get(row.parenttype, {}).get(row.name, frappe._dict())
# get buying amount
if row.item_code in sales_boms:
row.buying_amount = self.get_buying_amount_from_sales_bom(row, sales_boms[row.item_code])
else:
row.buying_amount = self.get_buying_amount(row, row.item_code)
# get buying rate
if row.qty:
row.buying_rate = (row.buying_amount / row.qty) * 100.0
else:
row.buying_rate = 0.0
# calculate gross profit
row.gross_profit = row.selling_amount - row.buying_amount
if row.selling_amount:
row.gross_profit_percent = (row.gross_profit / row.selling_amount) * 100.0
else:
row.gross_profit_percent = 0.0
# add to grouped
if self.filters.group_by != "Invoice":
self.grouped.setdefault(row.get(scrub(self.filters.group_by)), []).append(row)
self.data.append(row)
if self.grouped:
self.collapse_group()
else:
self.grouped_data = []
def collapse_group(self):
# sum buying / selling totals for group
self.grouped_data = []
for key in self.grouped.keys():
for i, row in enumerate(self.grouped[key]):
if i==0:
new_row = row
else:
new_row.qty += row.qty
new_row.buying_amount += row.buying_amount
new_row.selling_amount += row.selling_amount
# new_row.allocated_amount += (row.allocated_amount or 0) if new_row.allocated_amount else 0
new_row.gross_profit = new_row.selling_amount - new_row.buying_amount
new_row.gross_profit_percent = ((new_row.gross_profit / new_row.selling_amount) * 100.0) \
if new_row.selling_amount else 0
new_row.buying_rate = ((new_row.buying_amount / new_row.qty) * 100.0) \
if new_row.qty else 0
self.grouped_data.append(new_row)
def skip_row(self, row, sales_boms):
if cint(row.update_stock) == 0 and not row.dn_detail:
if row.item_code not in self.non_stock_items:
return True
elif row.item_code in sales_boms:
for child_item in sales_boms[row.item_code]:
if child_item not in self.non_stock_items:
return True
elif self.filters.get("group_by") != "Invoice" and not row.get(scrub(self.filters.get("group_by"))):
return True
def get_buying_amount_from_sales_bom(self, row, sales_bom):
buying_amount = 0.0
for bom_item in sales_bom[row.item_code]:
if bom_item.get("parent_detail_docname")==row.name:
buying_amount += self.get_buying_amount(row, bom_item.item_code)
return buying_amount
def get_buying_amount(self, row, item_code):
# IMP NOTE
# stock_ledger_entries should already be filtered by item_code and warehouse and
# sorted by posting_date desc, posting_time desc
if item_code in self.non_stock_items:
# average purchasing rate for non-stock items
item_rate = frappe.db.sql("""select sum(base_amount) / sum(qty)
from `tabPurchase Invoice Item`
where item_code = %s and docstatus=1""", item_code)
return flt(row.qty) * (flt(item_rate[0][0]) if item_rate else 0)
else:
if row.dn_detail:
row.parenttype = "Delivery Note"
row.parent = row.delivery_note
row.name = row.dn_detail
my_sle = self.sle.get((item_code, row.warehouse))
for i, sle in enumerate(my_sle):
# find the stock valuation rate from stock ledger entry
if sle.voucher_type == row.parenttype and row.parent == sle.voucher_no and \
sle.voucher_detail_no == row.name:
previous_stock_value = len(my_sle) > i+1 and \
flt(my_sle[i+1].stock_value) or 0.0
return previous_stock_value - flt(sle.stock_value)
return 0.0
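# Worked example of the valuation-difference logic above (illustrative
# numbers): my_sle is sorted newest-first, so my_sle[i+1] is the entry just
# before the invoice posted. If the warehouse stock_value was 1000.0 before
# and 900.0 after, the buying amount attributed to this row is
# 1000.0 - 900.0 = 100.0.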
def load_invoice_items(self):
conditions = ""
if self.filters.company:
conditions += " and company = %(company)s"
if self.filters.from_date:
conditions += " and posting_date >= %(from_date)s"
if self.filters.to_date:
conditions += " and posting_date <= %(to_date)s"
self.si_list = frappe.db.sql("""select item.parenttype, si.name,
si.posting_date, si.posting_time, si.project_name, si.update_stock,
si.customer, si.customer_group, si.territory,
item.item_code, item.item_name, item.description, item.warehouse,
item.item_group, item.brand, item.dn_detail, item.delivery_note,
item.qty, item.base_rate, item.base_amount, item.name as "item_row",
sales.sales_person, sales.sales_designation, sales.allocated_amount,
sales.incentives
from `tabSales Invoice` si
inner join `tabSales Invoice Item` item on item.parent = si.name
left join `tabSales Team` sales on sales.parent = si.name
where
si.docstatus = 1 %s
order by
si.posting_date desc, si.posting_time desc""" % (conditions,), self.filters, as_dict=1)
def load_stock_ledger_entries(self):
res = frappe.db.sql("""select item_code, voucher_type, voucher_no,
voucher_detail_no, stock_value, warehouse, actual_qty as qty
from `tabStock Ledger Entry`
where company=%(company)s
order by
item_code desc, warehouse desc, posting_date desc,
posting_time desc, name desc""", self.filters, as_dict=True)
self.sle = {}
for r in res:
if (r.item_code, r.warehouse) not in self.sle:
self.sle[(r.item_code, r.warehouse)] = []
self.sle[(r.item_code, r.warehouse)].append(r)
def load_sales_bom(self):
self.sales_boms = {}
for d in frappe.db.sql("""select parenttype, parent, parent_item,
item_code, warehouse, -1*qty as total_qty, parent_detail_docname
from `tabPacked Item` where docstatus=1""", as_dict=True):
self.sales_boms.setdefault(d.parenttype, frappe._dict()).setdefault(d.parent,
frappe._dict()).setdefault(d.parent_item, []).append(d)
def load_non_stock_items(self):
self.non_stock_items = frappe.db.sql_list("""select name from tabItem
where ifnull(is_stock_item, 'No')='No'""")
|
devopshq/vspheretools | refs/heads/master | pysphere/version.py | 2 | # Do not edit. Auto generated
version = (0, 1, 8) |
ldengjie/myzsh | refs/heads/master | plugins/git-prompt/gitstatus.py | 343 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from subprocess import Popen, PIPE
import re
# change those symbols to whatever you prefer
symbols = {
'ahead of': '↑',
'behind': '↓',
'staged': '♦',
'changed': '‣',
'untracked': '…',
'clean': '⚡',
'unmerged': '≠',
'sha1': ':'
}
output, error = Popen(
['git', 'status'], stdout=PIPE, stderr=PIPE, universal_newlines=True).communicate()
if error:
import sys
sys.exit(0)
lines = output.splitlines()
behead_re = re.compile(
r"^# Your branch is (ahead of|behind) '(.*)' by (\d+) commit")
diverge_re = re.compile(r"^# and have (\d+) and (\d+) different")
status = ''
staged = re.compile(r'^# Changes to be committed:$', re.MULTILINE)
changed = re.compile(r'^# Changed but not updated:$', re.MULTILINE)
untracked = re.compile(r'^# Untracked files:$', re.MULTILINE)
unmerged = re.compile(r'^# Unmerged paths:$', re.MULTILINE)
def execute(*command):
out, err = Popen(stdout=PIPE, stderr=PIPE, *command).communicate()
if not err:
nb = len(out.splitlines())
else:
nb = '?'
return nb
if staged.search(output):
nb = execute(
['git', 'diff', '--staged', '--name-only', '--diff-filter=ACDMRT'])
status += '%s%s' % (symbols['staged'], nb)
if unmerged.search(output):
nb = execute(['git', 'diff', '--staged', '--name-only', '--diff-filter=U'])
status += '%s%s' % (symbols['unmerged'], nb)
if changed.search(output):
nb = execute(['git', 'diff', '--name-only', '--diff-filter=ACDMRT'])
status += '%s%s' % (symbols['changed'], nb)
if untracked.search(output):
status += symbols['untracked']
if status == '':
status = symbols['clean']
remote = ''
bline = lines[0]
if bline.find('Not currently on any branch') != -1:
branch = symbols['sha1'] + Popen([
'git',
'rev-parse',
'--short',
'HEAD'], stdout=PIPE).communicate()[0][:-1]
else:
branch = bline.split(' ')[-1]
bstatusline = lines[1]
match = behead_re.match(bstatusline)
if match:
remote = symbols[match.groups()[0]]
remote += match.groups()[2]
elif lines[2:]:
div_match = diverge_re.match(lines[2])
if div_match:
remote = "{behind}{1}{ahead of}{0}".format(
*div_match.groups(), **symbols)
print('\n'.join([branch, remote, status]))
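# The three printed lines are consumed by the shell side of the git-prompt
# plugin, in this order: branch name, ahead/behind marker, and working-tree
# status string (an assumption based on this script's output, not on the
# shell code itself).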
|
cristian99garcia/devtutor-activity | refs/heads/master | modules.py | 1 | # Copyright (C) 2010 Kandarp Kaushik
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from gettext import gettext as _
from gi.repository import Gtk
from gi.repository import Pango
from gi.repository import GObject
from sugar3.graphics.toolbarbox import ToolbarBox
from sugar3.activity.widgets import *
class ShowModules(GObject.GObject):
def __init__(self, canvas):
GObject.GObject.__init__(self)
self.set_canvas = canvas
def show_modules(self):
self.main_container = Gtk.VBox()
self.line1 = Gtk.HBox()
button1 = Gtk.Button("Activity")
self.line1.add(button1)
button1.connect('clicked', self.activity, None)
button1.show()
button1.get_child().modify_font(Pango.FontDescription("Sans 18"))
self.line2 = Gtk.HBox()
button2 = Gtk.Button("Graphics")
self.line2.add(button2)
button2.connect('clicked', self.graphics, None)
button2.show()
button2.get_child().modify_font(Pango.FontDescription("Sans 18"))
self.line3 = Gtk.HBox()
button3 = Gtk.Button("Bundle")
self.line3.add(button3)
button3.connect('clicked', self.bundle, None)
button3.show()
button3.get_child().modify_font(Pango.FontDescription("Sans 18"))
self.line4 = Gtk.HBox()
button4 = Gtk.Button("Datastore")
self.line4.add(button4)
button4.connect('clicked', self.datastore, None)
button4.show()
button4.get_child().modify_font(Pango.FontDescription("Sans 18"))
self.line5 = Gtk.HBox()
button5 = Gtk.Button("Toolkit")
button5.get_child().modify_font(Pango.FontDescription("Sans 18"))
self.line5.add(button5)
button5.connect('clicked', self.toolkit, None)
button5.show()
self.line6 = Gtk.HBox()
button6 = Gtk.Button("Dispatch")
self.line6.add(button6)
button6.connect('clicked', self.dispatch, None)
button6.show()
button6.get_child().modify_font(Pango.FontDescription("Sans 18"))
self.main_container.add(self.line1)
self.main_container.add(self.line2)
self.main_container.add(self.line3)
self.main_container.add(self.line4)
self.main_container.add(self.line5)
self.main_container.add(self.line6)
self.line1.show()
self.line2.show()
self.line3.show()
self.line4.show()
self.line5.show()
self.line6.show()
self.set_canvas(self.main_container)
self.main_container.show()
def activity(self, sender, data=None):
self.main_container = Gtk.VBox()
self.line1 = Gtk.HBox()
button1 = Gtk.Button("Activity")
self.line1.add(button1)
# button1.connect('clicked', self.activity123, None)
button1.show()
button1.get_child().modify_font(Pango.FontDescription("Sans 18"))
self.line2 = Gtk.HBox()
button2 = Gtk.Button("Bundlebuilder")
self.line2.add(button2)
# button2.connect('clicked', self.bundlebuilder, None)
button2.show()
button2.get_child().modify_font(Pango.FontDescription("Sans 18"))
self.line3 = Gtk.HBox()
button3 = Gtk.Button("Factory")
self.line3.add(button3)
# button3.connect('clicked', self.factory, None)
button3.show()
button3.get_child().modify_font(Pango.FontDescription("Sans 18"))
self.line4 = Gtk.HBox()
button4 = Gtk.Button("Handles")
self.line4.add(button4)
# button4.connect('clicked', self.handles, None)
button4.show()
button4.get_child().modify_font(Pango.FontDescription("Sans 18"))
self.line5 = Gtk.HBox()
button5 = Gtk.Button("NamingAlert")
self.line5.add(button5)
# button5.connect('clicked', self.namingalert, None)
button5.show()
button5.get_child().modify_font(Pango.FontDescription("Sans 18"))
self.line6 = Gtk.HBox()
button6 = Gtk.Button("Widgets")
self.line6.add(button6)
button6.connect('clicked', self.widgets, None)
button6.show()
button6.get_child().modify_font(Pango.FontDescription("Sans 18"))
self.main_container.add(self.line1)
self.main_container.add(self.line2)
self.main_container.add(self.line3)
self.main_container.add(self.line4)
self.main_container.add(self.line5)
self.main_container.add(self.line6)
self.line1.show()
self.line2.show()
self.line3.show()
self.line4.show()
self.line5.show()
self.line6.show()
self.set_canvas(self.main_container)
self.main_container.show()
def graphics(self, sender, data=None):
pass
def bundle(self, sender, data=None):
pass
def datastore(self, sender, data=None):
pass
def toolkit(self, sender, data=None):
pass
def dispatch(self, sender, data=None):
pass
def widgets(self, sender, data=None):
self.container = Gtk.VBox()
self.set_canvas(self.container)
self.heading()
self.add_line1()
# self.add_line2()
# self.add_line3()
# self.add_line4()
self.add_line5()
self.add_line6()
self.add_line7()
self.add_line8()
self.container.show()
def heading(self):
self.line = Gtk.HBox()
self.label = Gtk.Label(_("Module/Method"))
self.line.add(self.label)
self.label.show()
self.label1 = Gtk.Label(_("GUI"))
self.line.add(self.label1)
self.label1.show()
self.label2 = Gtk.Label(_("Description"))
self.line.add(self.label2)
self.label2.show()
self.container.add(self.line)
self.line.show()
def add_line1(self):
self.line = Gtk.HBox()
self.label = Gtk.Label(_("sugar.activity.widgets.StopButton()"))
self.line.add(self.label)
self.label.show()
toolbar_box1 = ToolbarBox()
self.line.add(toolbar_box1)
toolbar_box1.show()
title_entry1 = StopButton(self)
toolbar_box1.toolbar.insert(title_entry1, 0)
title_entry1.show()
self.label1 = Gtk.Label(_("Some Description"))
self.line.add(self.label1)
self.label1.show()
self.container.add(self.line)
self.line.show()
def add_line2(self):
self.line = Gtk.HBox()
self.label = Gtk.Label(_("sugar.activity.widgets.ActivityButton()"))
self.line.add(self.label)
self.label.show()
toolbar_box1 = ToolbarBox()
self.line.add(toolbar_box1)
toolbar_box1.show()
activity_button1 = ActivityButton(self)
toolbar_box1.toolbar.insert(activity_button1, 0)
activity_button1.show()
self.label1 = Gtk.Label(_("Some Description"))
self.line.add(self.label1)
self.label1.show()
self.container.add(self.line)
self.line.show()
def add_line4(self):
self.line = Gtk.HBox()
self.label = Gtk.Label(_("sugar.activity.widgets.ShareButton()"))
self.line.add(self.label)
self.label.show()
toolbar_box1 = ToolbarBox()
self.line.add(toolbar_box1)
toolbar_box1.show()
title_entry1 = ShareButton(self)
toolbar_box1.toolbar.insert(title_entry1, 0)
title_entry1.show()
self.label1 = Gtk.Label(_("Some Description"))
self.line.add(self.label1)
self.label1.show()
self.container.add(self.line)
self.line.show()
def add_line5(self):
self.line = Gtk.HBox()
self.label = Gtk.Label(_("sugar.activity.widgets.RedoButton()"))
self.line.add(self.label)
self.label.show()
toolbar_box1 = ToolbarBox()
self.line.add(toolbar_box1)
toolbar_box1.show()
title_entry1 = RedoButton()
toolbar_box1.toolbar.insert(title_entry1, 0)
title_entry1.show()
self.label1 = Gtk.Label(_("Some Description"))
self.line.add(self.label1)
self.label1.show()
self.container.add(self.line)
self.line.show()
def add_line6(self):
self.line = Gtk.HBox()
self.label = Gtk.Label(_("sugar.activity.widgets.UndoButton()"))
self.line.add(self.label)
self.label.show()
toolbar_box1 = ToolbarBox()
self.line.add(toolbar_box1)
toolbar_box1.show()
title_entry1 = UndoButton()
toolbar_box1.toolbar.insert(title_entry1, 0)
title_entry1.show()
self.label1 = Gtk.Label(_("Some Description"))
self.line.add(self.label1)
self.label1.show()
self.container.add(self.line)
self.line.show()
def add_line7(self):
self.line = Gtk.HBox()
self.label = Gtk.Label(_("sugar.activity.widgets.CopyButton()"))
self.line.add(self.label)
self.label.show()
toolbar_box1 = ToolbarBox()
self.line.add(toolbar_box1)
toolbar_box1.show()
title_entry1 = CopyButton()
toolbar_box1.toolbar.insert(title_entry1, 0)
title_entry1.show()
self.label1 = Gtk.Label(_("Some Description"))
self.line.add(self.label1)
self.label1.show()
self.container.add(self.line)
self.line.show()
def add_line8(self):
self.line = Gtk.HBox()
self.label = Gtk.Label(_("sugar.activity.widgets.PasteButton()"))
self.line.add(self.label)
self.label.show()
toolbar_box1 = ToolbarBox()
self.line.add(toolbar_box1)
toolbar_box1.show()
title_entry1 = PasteButton()
toolbar_box1.toolbar.insert(title_entry1, 0)
title_entry1.show()
self.label1 = Gtk.Label(_("Some Description"))
self.line.add(self.label1)
self.label1.show()
self.container.add(self.line)
self.line.show()
|
hrishioa/Aviato | refs/heads/master | kartograph/kartograph/simplify/mpoint.py | 4 |
class MPoint:
"""
Point class used for polygon simplification
"""
def __init__(self, x, y):
self.x = x
self.y = y
self.simplified = False
self.deleted = False
self.keep = False
self.three = False
self.features = set()
def isDeletable(self):
if self.keep or self.simplified or self.three:
return False
return True
def __repr__(self):
return 'Pt(%.2f,%.2f)' % (self.x, self.y)
def __len__(self):
return 2
def __getitem__(self, key):
if key == 0:
return self.x
if key == 1:
return self.y
raise IndexError()
def __contains__(self, key):
if key == "deleted":
return True
return False
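if __name__ == '__main__':
    # Tiny self-test sketch (not part of the original module); it exercises
    # the sequence-style protocol defined above.
    p = MPoint(1.0, 2.0)
    assert len(p) == 2 and (p[0], p[1]) == (1.0, 2.0)
    assert 'deleted' in p  # __contains__ only reports the "deleted" key
    assert p.isDeletable()  # a fresh point is neither kept nor simplified
    print p  # Pt(1.00,2.00)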
|
msarahan/bokeh | refs/heads/master | bokeh/server/protocol/messages/tests/test_patch_doc.py | 6 | from __future__ import absolute_import, print_function
import unittest
import bokeh.document as document
from bokeh.model import Model
from bokeh.core.properties import Int, Instance
from bokeh.server.protocol import Protocol
class AnotherModelInTestPatchDoc(Model):
bar = Int(1)
class SomeModelInTestPatchDoc(Model):
foo = Int(2)
child = Instance(Model)
class TestPatchDocument(unittest.TestCase):
def _sample_doc(self):
doc = document.Document()
another = AnotherModelInTestPatchDoc()
doc.add_root(SomeModelInTestPatchDoc(child=another))
doc.add_root(SomeModelInTestPatchDoc())
return doc
def test_create_model_changed(self):
sample = self._sample_doc()
obj = next(iter(sample.roots))
event = document.ModelChangedEvent(sample, obj, 'foo', obj.foo, 42, 42)
Protocol("1.0").create("PATCH-DOC", [event])
def test_create_then_apply_model_changed(self):
sample = self._sample_doc()
foos = []
for r in sample.roots:
foos.append(r.foo)
assert foos == [ 2, 2 ]
obj = next(iter(sample.roots))
assert obj.foo == 2
event = document.ModelChangedEvent(sample, obj, 'foo', obj.foo, 42, 42)
msg = Protocol("1.0").create("PATCH-DOC", [event])
copy = document.Document.from_json_string(sample.to_json_string())
msg.apply_to_document(copy)
foos = []
for r in copy.roots:
foos.append(r.foo)
foos.sort()
assert foos == [ 2, 42 ]
def test_should_suppress_model_changed(self):
sample = self._sample_doc()
root = None
other_root = None
for r in sample.roots:
if r.child is not None:
root = r
else:
other_root = r
assert root is not None
assert other_root is not None
new_child = AnotherModelInTestPatchDoc(bar=56)
# integer property changed
event1 = document.ModelChangedEvent(sample, root, 'foo', root.foo, 42, 42)
msg = Protocol("1.0").create("PATCH-DOC", [event1])
assert msg.should_suppress_on_change(event1)
assert not msg.should_suppress_on_change(document.ModelChangedEvent(sample, root, 'foo', root.foo, 43, 43))
assert not msg.should_suppress_on_change(document.ModelChangedEvent(sample, root, 'bar', root.foo, 43, 43))
assert not msg.should_suppress_on_change(document.ModelChangedEvent(sample, other_root, 'foo', root.foo, 43, 43))
# Model property changed
event2 = document.ModelChangedEvent(sample, root, 'child', root.child, new_child, new_child)
msg2 = Protocol("1.0").create("PATCH-DOC", [event2])
assert msg2.should_suppress_on_change(event2)
assert not msg2.should_suppress_on_change(document.ModelChangedEvent(sample, root, 'child', root.child, other_root, other_root))
assert not msg2.should_suppress_on_change(document.ModelChangedEvent(sample, root, 'blah', root.child, new_child, new_child))
assert not msg2.should_suppress_on_change(document.ModelChangedEvent(sample, other_root, 'child', other_root.child, new_child, new_child))
# Model property changed to None
event3 = document.ModelChangedEvent(sample, root, 'child', root.child, None, None)
msg3 = Protocol("1.0").create("PATCH-DOC", [event3])
assert msg3.should_suppress_on_change(event3)
assert not msg3.should_suppress_on_change(document.ModelChangedEvent(sample, root, 'child', root.child, other_root, other_root))
assert not msg3.should_suppress_on_change(document.ModelChangedEvent(sample, root, 'blah', root.child, None, None))
assert not msg3.should_suppress_on_change(document.ModelChangedEvent(sample, other_root, 'child', other_root.child, None, None))
# Model property changed from None
event4 = document.ModelChangedEvent(sample, other_root, 'child', other_root.child, None, None)
msg4 = Protocol("1.0").create("PATCH-DOC", [event4])
assert msg4.should_suppress_on_change(event4)
assert not msg4.should_suppress_on_change(document.ModelChangedEvent(sample, other_root, 'child', other_root.child, root, root))
assert not msg4.should_suppress_on_change(document.ModelChangedEvent(sample, other_root, 'blah', other_root.child, None, None))
assert not msg4.should_suppress_on_change(document.ModelChangedEvent(sample, root, 'child', other_root.child, None, None))
# RootAdded
event5 = document.RootAddedEvent(sample, root)
msg5 = Protocol("1.0").create("PATCH-DOC", [event5])
assert msg5.should_suppress_on_change(event5)
assert not msg5.should_suppress_on_change(document.RootAddedEvent(sample, other_root))
assert not msg5.should_suppress_on_change(document.RootRemovedEvent(sample, root))
# RootRemoved
event6 = document.RootRemovedEvent(sample, root)
msg6 = Protocol("1.0").create("PATCH-DOC", [event6])
assert msg6.should_suppress_on_change(event6)
assert not msg6.should_suppress_on_change(document.RootRemovedEvent(sample, other_root))
assert not msg6.should_suppress_on_change(document.RootAddedEvent(sample, root))
# ColumnsStreamed
event7 = document.ModelChangedEvent(sample, root, 'data', 10, None, None,
hint=document.ColumnsStreamedEvent(sample, root, {}, None))
msg7 = Protocol("1.0").create("PATCH-DOC", [event7])
assert msg7.should_suppress_on_change(event7)
assert not msg7.should_suppress_on_change(
document.ModelChangedEvent(sample, root, 'data', 10, None, None,
hint=document.ColumnsStreamedEvent(sample, root, {}, 10))
)
assert not msg7.should_suppress_on_change(
document.ModelChangedEvent(sample, root, 'data', 10, None, None,
hint=document.ColumnsStreamedEvent(sample, root, {"a": [10]}, None))
)
assert not msg7.should_suppress_on_change(
document.ModelChangedEvent(sample, root, 'data', 10, None, None,
hint=document.ColumnsStreamedEvent(sample, other_root, {}, None))
)
|
MortimerGoro/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/py/testing/process/test_forkedfunc.py | 162 | import pytest
import py, sys, os
pytestmark = py.test.mark.skipif("not hasattr(os, 'fork')")
def test_waitfinish_removes_tempdir():
ff = py.process.ForkedFunc(boxf1)
assert ff.tempdir.check()
ff.waitfinish()
assert not ff.tempdir.check()
def test_tempdir_gets_gc_collected(monkeypatch):
monkeypatch.setattr(os, 'fork', lambda: os.getpid())
ff = py.process.ForkedFunc(boxf1)
assert ff.tempdir.check()
ff.__del__()
assert not ff.tempdir.check()
def test_basic_forkedfunc():
result = py.process.ForkedFunc(boxf1).waitfinish()
assert result.out == "some out\n"
assert result.err == "some err\n"
assert result.exitstatus == 0
assert result.signal == 0
assert result.retval == 1
def test_exitstatus():
def func():
os._exit(4)
result = py.process.ForkedFunc(func).waitfinish()
assert result.exitstatus == 4
assert result.signal == 0
assert not result.out
assert not result.err
def test_exception_in_func():
def fun():
raise ValueError(42)
ff = py.process.ForkedFunc(fun)
result = ff.waitfinish()
assert result.exitstatus == ff.EXITSTATUS_EXCEPTION
assert result.err.find("ValueError: 42") != -1
assert result.signal == 0
assert not result.retval
def test_forkedfunc_on_fds():
result = py.process.ForkedFunc(boxf2).waitfinish()
assert result.out == "someout"
assert result.err == "someerr"
assert result.exitstatus == 0
assert result.signal == 0
assert result.retval == 2
def test_forkedfunc_on_fds_output():
result = py.process.ForkedFunc(boxf3).waitfinish()
assert result.signal == 11
assert result.out == "s"
def test_forkedfunc_on_stdout():
def boxf3():
import sys
sys.stdout.write("hello\n")
os.kill(os.getpid(), 11)
result = py.process.ForkedFunc(boxf3).waitfinish()
assert result.signal == 11
assert result.out == "hello\n"
def test_forkedfunc_signal():
result = py.process.ForkedFunc(boxseg).waitfinish()
assert result.retval is None
if sys.version_info < (2,4):
py.test.skip("signal detection does not work with python prior 2.4")
assert result.signal == 11
def test_forkedfunc_huge_data():
result = py.process.ForkedFunc(boxhuge).waitfinish()
assert result.out
assert result.exitstatus == 0
assert result.signal == 0
assert result.retval == 3
def test_box_seq():
# we run many boxes with huge data, just one after another
for i in range(50):
result = py.process.ForkedFunc(boxhuge).waitfinish()
assert result.out
assert result.exitstatus == 0
assert result.signal == 0
assert result.retval == 3
def test_box_in_a_box():
def boxfun():
result = py.process.ForkedFunc(boxf2).waitfinish()
print (result.out)
sys.stderr.write(result.err + "\n")
return result.retval
result = py.process.ForkedFunc(boxfun).waitfinish()
assert result.out == "someout\n"
assert result.err == "someerr\n"
assert result.exitstatus == 0
assert result.signal == 0
assert result.retval == 2
def test_kill_func_forked():
class A:
pass
info = A()
import time
def box_fun():
time.sleep(10) # we don't want to last forever here
ff = py.process.ForkedFunc(box_fun)
os.kill(ff.pid, 15)
result = ff.waitfinish()
if py.std.sys.version_info < (2,4):
py.test.skip("signal detection does not work with python prior 2.4")
assert result.signal == 15
def test_hooks(monkeypatch):
def _boxed():
return 1
def _on_start():
sys.stdout.write("some out\n")
sys.stdout.flush()
def _on_exit():
sys.stderr.write("some err\n")
sys.stderr.flush()
result = py.process.ForkedFunc(_boxed, child_on_start=_on_start,
child_on_exit=_on_exit).waitfinish()
assert result.out == "some out\n"
assert result.err == "some err\n"
assert result.exitstatus == 0
assert result.signal == 0
assert result.retval == 1
# ======================================================================
# examples
# ======================================================================
#
def boxf1():
sys.stdout.write("some out\n")
sys.stderr.write("some err\n")
return 1
def boxf2():
os.write(1, "someout".encode('ascii'))
os.write(2, "someerr".encode('ascii'))
return 2
def boxf3():
os.write(1, "s".encode('ascii'))
os.kill(os.getpid(), 11)
def boxseg():
os.kill(os.getpid(), 11)
def boxhuge():
s = " ".encode('ascii')
os.write(1, s * 10000)
os.write(2, s * 10000)
os.write(1, s * 10000)
os.write(1, s * 10000)
os.write(2, s * 10000)
os.write(2, s * 10000)
os.write(1, s * 10000)
return 3
|