code (stringlengths 3–1.05M) | repo_name (stringlengths 5–104) | path (stringlengths 4–251) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64 3–1.05M)
---|---|---|---|---|---|
import os
def parse_distributions_h(ffi, inc_dir):
"""
Parse distributions.h located in inc_dir for CFFI, filling in the ffi.cdef
Read the function declarations without the "#define ..." macros that will
be filled in when loading the library.
"""
with open(os.path.join(inc_dir, 'random', 'bitgen.h')) as fid:
s = []
for line in fid:
# massage the include file
if line.strip().startswith('#'):
continue
s.append(line)
ffi.cdef('\n'.join(s))
with open(os.path.join(inc_dir, 'random', 'distributions.h')) as fid:
s = []
in_skip = 0
for line in fid:
# massage the include file
if line.strip().startswith('#'):
continue
# skip any inlined function definition
# which starts with 'static NPY_INLINE xxx(...) {'
# and ends with a closing '}'
if line.strip().startswith('static NPY_INLINE'):
in_skip += line.count('{')
continue
elif in_skip > 0:
in_skip += line.count('{')
in_skip -= line.count('}')
continue
# replace defines with their value or remove them
line = line.replace('DECLDIR', '')
line = line.replace('NPY_INLINE', '')
line = line.replace('RAND_INT_TYPE', 'int64_t')
s.append(line)
ffi.cdef('\n'.join(s))
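# Minimal usage sketch (both paths below are illustrative; inc_dir must contain
# random/bitgen.h and random/distributions.h):
#
#   from cffi import FFI
#   ffi = FFI()
#   parse_distributions_h(ffi, '/path/to/numpy/include')
#   lib = ffi.dlopen('/path/to/libnpyrandom.so')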
| WarrenWeckesser/numpy | numpy/random/_examples/cffi/parse.py | Python | bsd-3-clause | 1,515 |
#!/usr/bin/env python
"""
fasta_average_length.py
Author: Tony Papenfuss
Date: Wed Mar 26 15:03:28 EST 2008
"""
import sys
from mungo.fasta import FastaFile
from optparse import OptionParser
usage = "%prog <Input file1> [<Input file2> ...]"
parser = OptionParser(usage=usage, version="%prog - Version 1")
options, args = parser.parse_args()
if len(args)==0:
parser.print_help()
sys.exit()
for filename in args:
    n = 0
    total = 0L
    for h,s in FastaFile(filename):
        n += 1
        total += len(s)
    # Report the mean sequence length for the file
    print '%s\t%.1f' % (filename, float(total)/n)
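# Example invocation (file names are illustrative):
#
#   python fasta_average_length.py contigs1.fa contigs2.fa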
| PapenfussLab/Mungo | bin/fasta_average_length.py | Python | artistic-2.0 | 579 |
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import logging
import re
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
log = logging.getLogger('twitterfeed')
# Size of the chunks when fetching a timeline
CHUNK_SIZE = 200
# Maximum number of tweets to fetch no matter how (if there is no
# since_id for example or a too old since_id)
MAX_TWEETS = 1000
class TwitterFeed(object):
"""Parses a twitter feed
Example::
twitterfeed:
account: <account>
consumer_key: <consumer_key>
consumer_secret: <consumer_secret>
access_token_key: <access_token_key>
access_token_secret: <access_token_secret>
By default, the 50 last tweets are fetched corresponding to the option:
all_entries: yes
To change that default number:
tweets: 75
Beware that Twitter only allows 300 requests during a 15 minutes
window.
If you want to process only new tweets:
all_entries: no
That option's behaviour is changed if the corresponding task's
configuration has been changed. In that case, new tweets are
fetched and if there are no more than `tweets`, older ones are
fetched to have `tweets` of them in total.
"""
schema = {
'type': 'object',
'properties': {
'account': {'type': 'string'},
'consumer_key': {'type': 'string'},
'consumer_secret': {'type': 'string'},
'access_token_key': {'type': 'string'},
'access_token_secret': {'type': 'string'},
'all_entries': {'type': 'boolean', 'default': True},
'tweets': {'type': 'number', 'default': 50},
},
'required': [
'account',
'consumer_key',
'consumer_secret',
'access_token_secret',
'access_token_key',
],
'additionalProperties': False,
}
def on_task_start(self, task, config):
try:
import twitter # noqa
except ImportError:
raise plugin.PluginError('twitter module required', logger=log)
def on_task_input(self, task, config):
import twitter
account = config['account']
log.debug('Looking at twitter account `%s`', account)
try:
self.api = twitter.Api(
consumer_key=config['consumer_key'],
consumer_secret=config['consumer_secret'],
access_token_key=config['access_token_key'],
access_token_secret=config['access_token_secret'],
)
except twitter.TwitterError as ex:
raise plugin.PluginError(
'Unable to authenticate to twitter for task %s: %s' % (task.name, ex)
)
if config['all_entries']:
log.debug(
'Fetching %d last tweets from %s timeline' % (config['tweets'], config['account'])
)
tweets = self.get_tweets(account, number=config['tweets'])
else:
# Fetching from where we left off last time
since_id = task.simple_persistence.get('since_id', None)
if since_id:
log.debug(
'Fetching from tweet id %d from %s timeline' % (since_id, config['account'])
)
kwargs = {'since_id': since_id}
else:
log.debug('No since_id, fetching last %d tweets' % config['tweets'])
kwargs = {'number': config['tweets']}
tweets = self.get_tweets(account, **kwargs)
if task.config_modified and len(tweets) < config['tweets']:
log.debug('Configuration modified; fetching at least %d tweets' % config['tweets'])
max_id = tweets[-1].id if tweets else None
remaining_tweets = config['tweets'] - len(tweets)
tweets = tweets + self.get_tweets(account, max_id=max_id, number=remaining_tweets)
if tweets:
last_tweet = tweets[0]
log.debug('New last tweet id: %d' % last_tweet.id)
task.simple_persistence['since_id'] = last_tweet.id
log.debug('%d tweets fetched' % len(tweets))
for t in tweets:
log.debug('id:%d' % t.id)
return [self.entry_from_tweet(e) for e in tweets]
def get_tweets(self, account, number=MAX_TWEETS, since_id=None, max_id=None):
"""Fetch tweets from twitter account `account`."""
import twitter
all_tweets = []
while number > 0:
try:
tweets = self.api.GetUserTimeline(
screen_name=account,
include_rts=False,
exclude_replies=True,
count=min(number, CHUNK_SIZE),
since_id=since_id,
max_id=max_id,
)
except twitter.TwitterError as e:
raise plugin.PluginError('Unable to fetch timeline %s for %s' % (account, e))
if not tweets:
break
all_tweets += tweets
number -= len(tweets)
max_id = tweets[-1].id - 1
return all_tweets
def entry_from_tweet(self, tweet):
new_entry = Entry()
new_entry['title'] = tweet.text
urls = re.findall(r'(https?://\S+)', tweet.text)
new_entry['urls'] = urls
if urls:
new_entry['url'] = urls[0]
return new_entry
@event('plugin.register')
def register_plugin():
plugin.register(TwitterFeed, 'twitterfeed', api_ver=2)
| gazpachoking/Flexget | flexget/plugins/input/twitterfeed.py | Python | mit | 5,716 |
"""
Expand image metadata
"""
log = None
from imex.config import ConfigManager
from imex.rules import RuleManager
from imex.metadataeditor import MetadataEditor
from imex.logger import SimpleScreenLogger
from imex.metadata import ImageMetadata, Tag
| jdevera/imex | src/imex/__init__.py | Python | mit | 252 |
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
assert repr(1j) == "1j"
assert repr(complex()) == "0j"
assert repr(complex('nan-nanj')) == '(nan+nanj)'
assert repr(complex('-Nan+NaNj')) == '(nan+nanj)'
assert repr(complex('inf-infj')) == '(inf-infj)'
assert repr(complex('+inf+infj')) == '(inf+infj)'
assert repr(complex('-infINIty+infinityj')) == '(-inf+infj)'
assert complex(1.8456e3) == (1845.6+0j)
assert complex('1.8456e3') == (1845.6+0j)
assert complex(0, -365.12) == -365.12j
assert complex('-365.12j') == -365.12j
assert complex(-1.23E2, -45.678e1) == (-123-456.78j)
assert complex('-1.23e2-45.678e1j') == (-123-456.78j)
assert complex(21.98, -1) == (21.98-1j)
assert complex('21.98-j') == (21.98-1j)
assert complex('-j') == -1j
assert complex('+j') == 1j
assert complex('j') == 1j
assert complex(' \t \n \r ( \t \n \r 2.1-3.4j \t \n \r ) \t \n \r ') == (2.1-3.4j)
assert complex(complex(complex(3.14))) == (3.14+0j)
assert complex(complex(1, -2), .151692) == (1-1.848308j)
assert complex(complex(3.14), complex(-0.151692)) == (3.14-0.151692j)
assert complex(complex(-1, 2), complex(3, -4)) == (3+5j)
try:
complex('((2.1-3.4j))')
except ValueError as e:
assert str(e) == "complex() arg is a malformed string"
else:
raise AssertionError('this was supposed to raise an exception')
try:
complex('3.14 - 15.16 j')
except ValueError as e:
assert str(e) == "complex() arg is a malformed string"
else:
raise AssertionError('this was supposed to raise an exception')
try:
complex('foo')
except ValueError as e:
assert str(e) == "complex() arg is a malformed string"
else:
raise AssertionError('this was supposed to raise an exception')
try:
complex('foo', 1)
except TypeError as e:
assert str(e) == "complex() can't take second arg if first is a string"
else:
raise AssertionError('this was supposed to raise an exception')
try:
complex(1, 'bar')
except TypeError as e:
assert str(e) == "complex() second arg can't be a string"
else:
raise AssertionError('this was supposed to raise an exception')
# __nonzero__
assert complex(0, 0).__nonzero__() == False
assert complex(.0, .0).__nonzero__() == False
assert complex(0.0, 0.1).__nonzero__() == True
assert complex(1, 0).__nonzero__() == True
assert complex(3.14, -0.001e+5).__nonzero__() == True
assert complex(float('nan'), float('nan')).__nonzero__() == True
assert complex(-float('inf'), float('inf')).__nonzero__() == True
# __pos__
assert complex(0, 0).__pos__() == 0j
assert complex(42, -0.1).__pos__() == (42-0.1j)
assert complex(-1.2, 375E+2).__pos__() == (-1.2+37500j)
assert repr(complex(5, float('nan')).__pos__()) == '(5+nanj)'
assert repr(complex(float('inf'), 0.618).__pos__()) == '(inf+0.618j)' | google/grumpy | testing/complex_test.py | Python | apache-2.0 | 3,248 |
import sys
import socket
from socket import AF_INET, SOCK_STREAM
def make_ip(lan_ip, end_part):
lan_parts = lan_ip.split(".")
parts = lan_parts[: 3] + [end_part]
return ".".join(parts)
def scan_miftp_ip(lan_ip, timeout):
ftp_ips = []
ip_parts = lan_ip.split(".")
for i in range(0, 256):
try_ip_parts = ip_parts[: 3] + [str(i)]
try_ip = ".".join(try_ip_parts)
try:
sys.stdout.write("%s/256\r" % (i + 1))
sys.stdout.flush()
tcpCliSock = socket.socket(AF_INET, SOCK_STREAM)
tcpCliSock.settimeout(timeout)
tcpCliSock.connect((try_ip, 2121))
tcpCliSock.close()
del tcpCliSock
ftp_ips.append(try_ip)
        except (socket.error, socket.timeout):
            # connection refused or timed out: not a MiFTP host
            continue
return ftp_ips
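# Minimal usage sketch (the LAN address and timeout are illustrative):
#
#   if __name__ == '__main__':
#       servers = scan_miftp_ip('192.168.1.10', 0.2)
#       for ip in servers:
#           print("MiFTP server found at %s:2121" % ip)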
| shishusaiwai/miftpclient | miftpclient/scan.py | Python | gpl-3.0 | 799 |
#
# Copyright 2015 Simulmedia, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import os
import re
import shutil
import socket
import tarfile
import tempfile
import time
from contextlib import closing
from distutils import spawn
from os.path import expanduser
from subprocess import Popen
import psycopg2
import requests
from psycopg2._psycopg import OperationalError
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
logger = logging.getLogger('pyembedpg')
class PyEmbedPg(object):
DOWNLOAD_BASE_URL = 'http://ftp.postgresql.org/pub/source'
DOWNLOAD_URL = DOWNLOAD_BASE_URL + '/v{version}/postgresql-{version}.tar.bz2'
LOCAL_VERSION = 'local'
CACHE_DIRECTORY = '.pyembedpg'
def __init__(self, version=None):
"""
Initialize a new Postgres object
:param version: version to use. If it is not set, use the latest version in .pyembedpg directory. If not present
use the latest version remotely. Use 'local' to use the local postgres version installed on the machine
:return:
"""
home_dir = expanduser("~")
self._cache_dir = os.path.join(home_dir, PyEmbedPg.CACHE_DIRECTORY)
# if version is not specified, check local last version otherwise get last remote version
self.version = version
if not self.version:
self.version = self.get_latest_local_version()
if not self.version:
self.version = self.get_latest_remote_version()
if version == PyEmbedPg.LOCAL_VERSION:
full_path = spawn.find_executable('postgres')
if not full_path:
raise PyEmbedPgException('Cannot find postgres executable. Make sure it is in your path')
self._version_path = os.path.dirname(full_path)
else:
self._version_path = os.path.join(self._cache_dir, self.version)
def get_latest_local_version(self):
"""
Return the latest version installed in the cache
:return: latest version installed locally in the cache and None if there is nothing downloaded
"""
if not os.path.exists(self._cache_dir):
return None
tags = os.listdir(self._cache_dir)
# we want to sort based on numbers so:
# v3.0.0-QA6
# v3.0.0-QA15
# v3.0.0-QA2
# are sorted according to the numbers so no lexigraphically
revs_to_tag = [(re.split(r"[^\d]+", tag), tag) for tag in tags]
return max(revs_to_tag)[1]
def get_latest_remote_version(self):
"""
Return the latest version on the Postgres FTP server
:return: latest version installed locally on the Postgres FTP server
"""
response = requests.get(PyEmbedPg.DOWNLOAD_BASE_URL)
last_version_match = list(re.finditer('>v(?P<version>[^<]+)<', response.content.decode()))[-1]
return last_version_match.group('version')
def check_version_present(self):
"""
Check if the version is present in the cache
:return: True if the version has already been downloaded and build, False otherwise
"""
return os.path.exists(self._version_path)
def download_and_unpack(self):
# if the version we want to download already exists, do not do anything
if self.check_version_present():
logger.debug('Version {version} already present in cache'.format(version=self.version))
return
url = PyEmbedPg.DOWNLOAD_URL.format(version=self.version)
response = requests.get(url, stream=True)
if not response.ok:
raise PyEmbedPgException('Cannot download file {url}. Error: {error}'.format(url=url, error=response.content))
with tempfile.NamedTemporaryFile() as fd:
logger.debug('Downloading {url}'.format(url=url))
for block in response.iter_content(chunk_size=4096):
fd.write(block)
fd.flush()
# Unpack the file into temporary dir
temp_dir = tempfile.mkdtemp()
source_dir = os.path.join(temp_dir, 'postgresql-{version}'.format(version=self.version))
try:
# Can't use with context directly because of python 2.6
with closing(tarfile.open(fd.name)) as tar:
tar.extractall(temp_dir)
os.system(
'sh -c "cd {path} && ./configure --prefix={target_dir} && make install && cd contrib && make install"'.format(
path=source_dir,
target_dir=self._version_path)
)
finally:
shutil.rmtree(temp_dir, ignore_errors=True)
def start(self, port=5432):
"""
Start a new Postgres server on the specified port
:param port: port to connect to, can be an int or a list of ports
:return:
"""
if not self.check_version_present():
self.download_and_unpack()
bin_dir = os.path.join(self._version_path, 'bin')
ports = [port] if isinstance(port, int) else port
return DatabaseRunner(bin_dir, ports)
class DatabaseRunner(object):
ADMIN_USER = 'root'
TIMEOUT = 10
def __init__(self, bin_dir, ports):
self._ports = ports
self._postgres_cmd = os.path.join(bin_dir, 'postgres')
# init db
init_db = os.path.join(bin_dir, 'initdb')
self._temp_dir = tempfile.mkdtemp()
command = init_db + ' -D ' + self._temp_dir + ' -U ' + DatabaseRunner.ADMIN_USER
logger.debug('Running command: {command}'.format(command=command))
os.system(command)
# overwrite pg_hba.conf to only allow local access with password authentication
with open(os.path.join(self._temp_dir, 'pg_hba.conf'), 'w') as fd:
fd.write(
'# TYPE DATABASE USER ADDRESS METHOD\n'
'# "local" is for Unix domain socket connections only\n'
'local all {admin} trust\n'
'local all all md5\n'
'host all {admin} 127.0.0.1/32 trust\n'
'host all all 127.0.0.1/32 md5\n'
'# IPv6 local connections:\n'
'host all {admin} ::1/128 trust\n'
'host all all ::1/128 md5\n'.format(admin=DatabaseRunner.ADMIN_USER)
)
        def can_connect(port):
            # Returns True when nothing answers on the port, i.e. the port is free for us
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            return sock.connect_ex(('127.0.0.1', port)) != 0
self.running_port = next((port for port in ports if can_connect(port)), None)
if self.running_port is None:
raise PyEmbedPgException('Cannot run postgres on any of these ports [{ports}]'.format(ports=', '.join((str(p) for p in ports))))
self.proc = Popen([self._postgres_cmd, '-D', self._temp_dir, '-p', str(self.running_port)])
logger.debug('Postgres started on port {port}...'.format(port=self.running_port))
# Loop until the server is started
logger.debug('Waiting for Postgres to start...')
start = time.time()
while time.time() - start < DatabaseRunner.TIMEOUT:
try:
with psycopg2.connect(database='postgres', user=DatabaseRunner.ADMIN_USER, host='localhost', port=self.running_port):
break
except OperationalError:
pass
time.sleep(0.1)
else:
raise PyEmbedPgException('Cannot start postgres after {timeout} seconds'.format(timeout=DatabaseRunner.TIMEOUT))
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.shutdown()
def create_user(self, username, password):
"""Create a user
:param username:
:type username: basestring
:param password:
:type password: basestring
"""
with psycopg2.connect(database='postgres', user=DatabaseRunner.ADMIN_USER, host='localhost', port=self.running_port) as conn:
with conn.cursor() as cursor:
cursor.execute("CREATE USER {username} WITH ENCRYPTED PASSWORD '{password}'".format(username=username, password=password))
def create_database(self, name, owner=None):
"""Create a new database
:param name: database name
:type name: basestring
:param owner: username of the owner or None if unspecified
:type owner: basestring
"""
with psycopg2.connect(database='postgres', user=DatabaseRunner.ADMIN_USER, host='localhost', port=self.running_port) as conn:
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
with conn.cursor() as cursor:
sql = 'CREATE DATABASE {name} ' + ('WITH OWNER {owner}' if owner else '')
cursor.execute(sql.format(name=name, owner=owner))
def shutdown(self):
"""
Shutdown postgres and remove the data directory
"""
# stop pg
try:
logger.debug('Killing postgres on port {port}'.format(port=self.running_port))
self.proc.kill()
os.waitpid(self.proc.pid, 0)
finally:
logger.debug('Removing postgres data dir on {dir}'.format(dir=self._temp_dir))
# remove data directory
shutil.rmtree(self._temp_dir, ignore_errors=True)
class PyEmbedPgException(Exception):
pass
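# Minimal usage sketch (the Postgres version and ports are illustrative):
#
#   with PyEmbedPg('9.6.5').start(port=[5432, 5433]) as db:
#       db.create_user('scott', 'tiger')
#       db.create_database('testdb', owner='scott')
#       conn = psycopg2.connect(database='testdb', user='scott', password='tiger',
#                               host='localhost', port=db.running_port)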
| Simulmedia/pyembedpg | pyembedpg.py | Python | apache-2.0 | 10,244 |
import smtplib
from email.message import EmailMessage
from django.conf import settings
from django.core.mail import send_mail
def send_email(subject, message_body, from_email, to_emails):
'''
Create an email message using pythons smtplib and EmailMessage.
It returns a dictionary, with one entry for each recipient that was
refused. Each entry contains a tuple of the SMTP error code and the
accompanying error message sent by the server.
Args:
subject: a string for the subject line
message_body: a string for the email message body
from_email: who the email is coming from
to_emails: either a string or list of emails address being sent to
'''
if settings.QIPR_EMAIL_DEBUG:
return send_mail(subject,message_body,from_email, __get_list(to_emails))
msg = __create_message(subject, message_body, from_email, to_emails)
return __send(msg)
def __connect_to_smtp():
'''
Using Django settings and smtplib, create an smtplib SMTP instance
https://docs.python.org/3/library/smtplib.html#smtplib.SMTP
'''
connection = smtplib.SMTP(host=settings.QIPR_EMAIL_HOST,
port=settings.QIPR_EMAIL_PORT,
local_hostname=settings.QIPR_EMAIL_HOSTNAME)
return connection
def __send(message):
connection = __connect_to_smtp()
failures = {}
try:
failures = connection.send_message(message)
except smtplib.SMTPRecipientsRefused as err:
#If no emails are sent
failures = err.recipients
connection.quit()
return failures
def __create_message(subject, message_body, from_email, to_emails):
'''
Take the subject, message body, from email, and to emails list and create
a message object to send off.
'''
msg = EmailMessage()
msg.set_content(message_body)
msg['Subject'] = subject
msg['From'] = from_email
msg['To'] = __get_list(to_emails)
return msg
def __get_list(string_or_list):
'''
Take a string and return a list
'''
if isinstance(string_or_list, list):
return string_or_list
try:
return string_or_list.split()
except AttributeError:
return []
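# Minimal usage sketch (addresses are illustrative):
#
#   refused = send_email(
#       subject='QIPR approval needed',
#       message_body='A new project is awaiting your review.',
#       from_email='noreply@example.org',
#       to_emails=['reviewer@example.org'],
#   )
#   # `refused` maps each rejected recipient to an (SMTP error code, message) tuple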
| DevMattM/qipr_approver | qipr_approver/approver/utilities/send_email.py | Python | apache-2.0 | 2,211 |
"""winsdk
Tool-specific initialization for Microsoft Windows SDK.
"""
#
# Copyright (c) 2001-2007 The SCons Foundation
# Copyright (c) 2008 Tungsten Graphics, Inc.
# Copyright (c) 2009 VMware, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import os.path
import platform
import SCons.Errors
import SCons.Util
import msvc_sa
import mslib_sa
import mslink_sa
def get_vs_root(env):
# TODO: Check HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\VisualStudio\SxS\VS7
path = os.path.join(os.getenv('ProgramFiles', r'C:\Program Files'), 'Microsoft Visual Studio 9.0')
return path
def get_vs_paths(env):
vs_root = get_vs_root(env)
if vs_root is None:
raise SCons.Errors.InternalError, "WINSDK compiler not found"
tool_path = os.path.join(vs_root, 'Common7', 'IDE')
env.PrependENVPath('PATH', tool_path)
def get_vc_root(env):
# TODO: Check HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\VisualStudio\SxS\VC7
path = os.path.join(os.getenv('ProgramFiles', r'C:\Program Files'), 'Microsoft Visual Studio 9.0', 'VC')
return path
def get_vc_paths(env):
vc_root = get_vc_root(env)
if vc_root is None:
raise SCons.Errors.InternalError, "WINSDK compiler not found"
target_cpu = env['machine']
if target_cpu in ('generic', 'x86'):
bin_dir = 'bin'
lib_dir = 'lib'
elif target_cpu == 'x86_64':
# TODO: take in consideration the host cpu
bin_dir = r'bin\x86_amd64'
lib_dir = r'lib\amd64'
else:
raise SCons.Errors.InternalError, "Unsupported target machine"
include_dir = 'include'
env.PrependENVPath('PATH', os.path.join(vc_root, bin_dir))
env.PrependENVPath('INCLUDE', os.path.join(vc_root, include_dir))
env.PrependENVPath('LIB', os.path.join(vc_root, lib_dir))
def get_sdk_root(env):
if SCons.Util.can_read_reg:
key = r'SOFTWARE\Microsoft\Microsoft SDKs\Windows\CurrentInstallFolder'
try:
path, t = SCons.Util.RegGetValue(SCons.Util.HKEY_LOCAL_MACHINE, key)
except SCons.Util.RegError:
pass
else:
return path
return None
def get_sdk_paths(env):
sdk_root = get_sdk_root(env)
if sdk_root is None:
raise SCons.Errors.InternalError, "WINSDK not found"
target_cpu = env['machine']
bin_dir = 'Bin'
if target_cpu in ('generic', 'x86'):
lib_dir = 'Lib'
elif target_cpu == 'x86_64':
lib_dir = r'Lib\x64'
else:
raise SCons.Errors.InternalError, "Unsupported target machine"
include_dir = 'Include'
env.PrependENVPath('PATH', os.path.join(sdk_root, bin_dir))
env.PrependENVPath('INCLUDE', os.path.join(sdk_root, include_dir))
env.PrependENVPath('LIB', os.path.join(sdk_root, lib_dir))
def generate(env):
if not env.has_key('ENV'):
env['ENV'] = {}
get_vs_paths(env)
get_vc_paths(env)
get_sdk_paths(env)
msvc_sa.generate(env)
mslib_sa.generate(env)
mslink_sa.generate(env)
def exists(env):
return get_vc_root(env) is not None and get_sdk_root(env) is not None
# vim:set ts=4 sw=4 et:
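# Minimal SConstruct usage sketch (assumes this module is on SCons' tool path
# and that the 'machine' construction variable is set, as the code above expects):
#
#   env = Environment(tools=['winsdk'], toolpath=['scons'], machine='x86_64')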
| ayoubg/gem5-graphics | Mesa-7.11.2_GPGPU-Sim/scons/winsdk.py | Python | bsd-3-clause | 4,139 |
#!/usr/bin/python
import time
from Adafruit_I2C import Adafruit_I2C
# ===========================================================================
# VCNL4000 Class
# ===========================================================================
# Address of the sensor
VCNL4000_ADDRESS = 0x13
# Commands
VCNL4000_COMMAND = 0x80
VCNL4000_PRODUCTID = 0x81
VCNL4000_IRLED = 0x83
VCNL4000_AMBIENTPARAMETER = 0x84
VCNL4000_AMBIENTDATA = 0x85
VCNL4000_PROXIMITYDATA = 0x87
VCNL4000_SIGNALFREQ = 0x89
VCNL4000_PROXINITYADJUST = 0x8A
VCNL4000_3M125 = 0
VCNL4000_1M5625 = 1
VCNL4000_781K25 = 2
VCNL4000_390K625 = 3
VCNL4000_MEASUREAMBIENT = 0x10
VCNL4000_MEASUREPROXIMITY = 0x08
VCNL4000_AMBIENTREADY = 0x40
VCNL4000_PROXIMITYREADY = 0x20
class VCNL4000 :
i2c = None
    # Constructor
    def __init__(self, address=VCNL4000_ADDRESS):
self.i2c = Adafruit_I2C(address)
self.address = address
        # Write the proximity adjustment register
        self.i2c.write8(VCNL4000_PROXINITYADJUST, 0x81)
# Read data from proximity sensor
def read_proximity(self):
self.i2c.write8(VCNL4000_COMMAND, VCNL4000_MEASUREPROXIMITY)
while True:
result = self.i2c.readU8(VCNL4000_COMMAND)
            if (result & VCNL4000_PROXIMITYREADY):
return self.i2c.readU16(VCNL4000_PROXIMITYDATA)
time.sleep(0.001)
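# Minimal usage sketch (the polling interval is illustrative; requires the
# Adafruit_I2C library and a VCNL4000 wired to the I2C bus):
#
#   if __name__ == '__main__':
#       sensor = VCNL4000()
#       while True:
#           print "Proximity:", sensor.read_proximity()
#           time.sleep(0.5)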
| platenspeler/LamPI-2.0 | rrd/Adafruit-Raspberry-Pi-Python-Code/Adafruit_VCNL4000/Adafruit_VCNL4000.py | Python | gpl-3.0 | 1,302 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Common methods for obtaining a reference to the provider driver class.
"""
__all__ = ["get_driver", "set_driver"]
def get_driver(drivers, provider, deprecated_providers=None, deprecated_constants=None):
"""
Get a driver.
:param drivers: Dictionary containing valid providers.
:type drivers: ``dict``
:param provider: Id (constant) of provider to get the driver for.
:type provider: :class:`libcloud.types.Provider`
:param: deprecated_providers: Dictionary with information about the
deprecated drivers.
:type deprecated_providers: ``dict``
:param: deprecated_constants: Dictionary with information about the
deprecated provider constants.
:type deprecated_constants: ``dict``
"""
# Those providers have been shut down or similar.
deprecated_providers = deprecated_providers or {}
if provider in deprecated_providers:
url = deprecated_providers[provider]["url"]
reason = deprecated_providers[provider]["reason"]
msg = "Provider no longer supported: %s, please visit: %s" % (url, reason)
raise Exception(msg)
# Those drivers have moved to "region" constructor argument model
deprecated_constants = deprecated_constants or {}
if provider in deprecated_constants:
old_name = provider.upper()
new_name = deprecated_constants[provider].upper()
url = "https://s.apache.org/lc0140un"
msg = (
'Provider constant "%s" has been removed. New constant '
'is now called "%s".\n'
"For more information on this change and how to modify your "
"code to work with it, please visit: %s" % (old_name, new_name, url)
)
raise Exception(msg)
if provider in drivers:
mod_name, driver_name = drivers[provider]
_mod = __import__(mod_name, globals(), locals(), [driver_name])
return getattr(_mod, driver_name)
# NOTE: This is for backward compatibility reasons where user could use
# a string value instead of a Provider.FOO enum constant and this function
# would still work
for provider_name, (mod_name, driver_name) in drivers.items():
# NOTE: This works because Provider enum class overloads __eq__
if provider.lower() == provider_name.lower():
_mod = __import__(mod_name, globals(), locals(), [driver_name])
return getattr(_mod, driver_name)
raise AttributeError("Provider %s does not exist" % (provider))
def set_driver(drivers, provider, module, klass):
"""
Sets a driver.
:param drivers: Dictionary to store providers.
:param provider: Id of provider to set driver for
:type provider: :class:`libcloud.types.Provider`
    :param module: The module which contains the driver
    :type module: ``str``
    :param klass: The driver class name
    :type klass: ``str``
"""
if provider in drivers:
raise AttributeError("Provider %s already registered" % (provider))
drivers[provider] = (module, klass)
# Check if this driver is valid
try:
driver = get_driver(drivers, provider)
except (ImportError, AttributeError) as exp:
drivers.pop(provider)
raise exp
return driver
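# Minimal usage sketch (the registry contents are illustrative; the second call
# registers a hypothetical external driver):
#
#   DRIVERS = {'dummy': ('libcloud.compute.drivers.dummy', 'DummyNodeDriver')}
#   cls = get_driver(DRIVERS, 'dummy')
#   cls = set_driver(DRIVERS, 'custom', 'mypackage.drivers', 'MyNodeDriver')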
| apache/libcloud | libcloud/common/providers.py | Python | apache-2.0 | 4,027 |
import RPi.GPIO as GPIO  # Import the RPi.GPIO library
import time  # Import the time library for timing control
import numpy as np
GPIO.setmode(GPIO.BOARD)  # Put the Raspberry Pi in BOARD pin-numbering mode
GPIO.setup(32,GPIO.OUT)  # Configure pin 32 as an output
p = GPIO.PWM(32,50)  # Put pin 32 in PWM mode, sending 50 pulses per second
print "inicio en 0"  # The servo starts in state 0 = stop
p.start(0)  # Start the pulse train
# Loop from 0 to 100 in steps of 0.1 that prints the pulse value on screen
# while applying it to the servo motor:
# 0 to 6: reverse rotation
# 6 to 8: stopped
# 8 to 100: rotates forward (normal direction), with speed control up to about 60
try:
    while (1):
        p.ChangeDutyCycle(60)
        time.sleep(0.01)
except KeyboardInterrupt:
    # On Ctrl-C, stop the PWM output and release the GPIO pins
    p.stop()
    GPIO.cleanup()
| UavLabsColombia/pibotballs | src/TestingFiles/ServoAdelante.py | Python | gpl-3.0 | 831 |
from enum import Enum, IntEnum, unique
class OperandType(IntEnum):
All = 0
Integer = 1
Float = 2
class TrigUnit(IntEnum):
Radians = 0
Degrees = 1
Gradients = 2
class Feature(IntEnum):
Symbolic = 1
StringConversion = 2
Rational = 4
class NotEnoughOperandsException(Exception):
def __init__(self, nbRequested, nbAvailable):
self.nbRequested = nbRequested
self.nbAvailable = nbAvailable
def __str__(self):
return "Not enough operands available. " + str(self.nbRequested) + " but only " + str(self.nbAvailable) + " available."
class WrongOperandsException(Exception):
def __init__(self, expectedTypes, nb=0):
try:
_ = (t for t in expectedTypes) # check for iterable
self.expectedTypes = expectedTypes
except TypeError:
self.expectedTypes = (expectedTypes,)
self.nb = nb
def __str__(self):
return "Wrong operands inputed. Expected " + str(self.expectedTypes) + "."
class CannotVerifyOperandsTypeException(Exception):
def __init__(self):
pass
def __str__(self):
return "Cannot verify operands type, array are not the same length."
class ExpressionNotValidException(Exception):
def __init__(self):
pass
def __str__(self):
return "Expression not valid."
class FeatureNotSupportedException(Exception):
def __init__(self):
pass
def __str__(self):
return "Feature not supported by Backend."
class UnsupportedBackendExpressionException(Exception):
def __init__(self):
pass
def __str__(self):
return "Expression is not a supported backend type."
class BackendException(Exception):
def __init__(self):
pass
def __str__(self):
return "Backend returned an exception during computation."
| lainwir3d/sailfish-rpn-calculator | common/python/rpncalc_common.py | Python | gpl-2.0 | 1,855 |
"""
define class Gene
"""
# from random import randint
from autoprover.evaluation import evaluation
# import logging
def random_chromosome(tactics):
"""generate a random chromosome with length of 15.
Args:
tactics (Tactic): a Tactic class instance.
Returns:
list: chromosome, a list of string(tactic).
"""
chromosome = []
for _ in range(15):
tactic = tactics.random_select()
if len(chromosome) == 0:
chromosome.append(tactic)
else:
while (tactics.is_unrepeatable(tactic) and
tactic == chromosome[-1]):
tactic = tactics.random_select()
chromosome.append(tactic)
return chromosome
class Gene:
"""
gene for gp
Args:
tactics (Tactic): a Tactic class instance.
chromosome (list): can provide a chromosome.
"""
def __init__(self, tactics=None, chromosome=None):
if chromosome is not None:
self.chromosome = chromosome
else:
self.chromosome = random_chromosome(tactics)
self.fitness = 0
self.raw_fitness = 0
self.coq_states = None
self.ttl = 0
def __len__(self):
return len(self.chromosome)
@property
def is_proof(self):
"""True if this gene is a Proof
Returns:
bool: is_proof
"""
if self.coq_states:
return self.coq_states[-1].is_proof
else:
return False
@property
def length_of_states(self):
"""A property of length_of_states
Returns:
int: length of self.coq_states
"""
return len(self.coq_states)
@property
def valid_tactics(self):
"""valid tactics from self.coq_states
Returns:
list: valid tactics
"""
if self.coq_states:
return [e.tactic for e in self.coq_states]
else:
return self.chromosome
@property
def goal(self):
"""return goal of gene
"""
return self.coq_states[-1].goal
def update_fitness_for_proof(self, proof, limit_hyp, limit_goal):
"""re-evaluate fitness for a proof
Args:
proof (Proof): a Proof instance.
"""
# TODO extract these two func into coqapi
coq_script = evaluation.preprocess(proof.theorem, self.chromosome)
run_output = evaluation.run_coqtop(coq_script)
self.coq_states = evaluation.get_coq_states(run_output, proof,
self.chromosome)
if self.is_proof:
return
self.raw_fitness = evaluation.calculate_fitness(
coq_states=self.coq_states[proof.offset:],
limit_hyp=limit_hyp,
limit_goal=limit_goal)
if len(self.chromosome) <= 20:
n_error = 0
else:
n_error = len(self.chromosome) - len(self.coq_states)
# self.fitness += 1 - (n_error / len(self.chromosome)) ** 2
self.raw_fitness += 1 - (n_error / len(self.chromosome))
# print(self.fitness)
# for state in self.coq_states:
# print(state)
return
def print_lastest(self):
"""print out gene
"""
print("c_len\ts_len\tfitness")
print(len(self.chromosome), end="\t")
print(self.length_of_states-1, end="\t")
print(self.fitness)
print('\n'.join(self.valid_tactics))
print(self.coq_states[-1])
return
def modification(self, data=None):
"""Modify one tactic of gene
"""
if self.is_proof:
return
if data is None:
self.print_lastest()
while True:
try:
if data:
edit_cmd = data
else:
edit_cmd = input("edit > ")
edit_cmd = edit_cmd.split()
except EOFError:
return
try:
if edit_cmd[0] == "state":
self.print_progress()
elif edit_cmd[0] == "list":
for index, tactic in enumerate(self.chromosome):
print("{}: {}".format(index, tactic))
elif edit_cmd[0] == "insert" or edit_cmd[0] == "replace":
if len(edit_cmd) < 2:
print("Expect a index here.")
continue
else:
edit_index = int(edit_cmd[1])
if edit_cmd[0] == "replace":
del self.chromosome[edit_index]
input_tactic = input("Please type a tactic: ")
self.chromosome.insert(edit_index, input_tactic)
break
elif edit_cmd[0] == "append":
if len(edit_cmd) == 2 and edit_cmd[1]:
self.chromosome.append(edit_cmd[1])
else:
input_tactic = input("Please type a tactic: ")
self.chromosome.append(input_tactic)
break
else:
print("state: all states")
print("list: print chromosome.")
print("insert <index>: insert a tactic before the index.")
print("replace <index>: replace the tactic of <index>-th.")
print("append: append the tactic at the end of chromosome")
except IndexError:
continue
if data is None:
print(self.chromosome)
else:
print("append by trigger!")
def format_output(self, proof):
"""Prepare a formated gene output
"""
format_string = ""
format_string += "\n".join(proof.theorem_body)
format_string += "\nProof.\n"
format_string += "\n".join([" "+e for e in self.valid_tactics[1:]])
format_string += "\n"
return format_string
def print_progress(self):
"""Print all state of gene
"""
for state in self.coq_states:
print(state)
def defrag(self, proof):
"""Defragment gene
"""
new_chromosome = [] + self.valid_tactics[proof.offset:]
i = proof.offset
print(self.chromosome)
for tactic in self.chromosome:
if i < len(self.valid_tactics) and tactic == self.valid_tactics[i]:
i += 1
continue
else:
new_chromosome.append(tactic)
self.chromosome = new_chromosome
print(self.chromosome)
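# Minimal sketch of building and scoring an initial population (the Tactic and
# Proof instances come from the rest of autoprover and are assumed here):
#
#   population = [Gene(tactics=tactic_set) for _ in range(100)]
#   for gene in population:
#       gene.update_fitness_for_proof(proof, limit_hyp=10, limit_goal=10)
#   best = max(population, key=lambda g: g.raw_fitness)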
| elic-eon/autoprover | autoprover/gp/gene.py | Python | mit | 6,711 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import importlib
from concurrent import futures
import pytest
from unittest import mock
from unittest.mock import patch
from google.api_core import operation as ga_operation
from google.api_core import exceptions as api_exceptions
from google.auth import credentials as auth_credentials
from google.cloud import aiplatform
from google.cloud.aiplatform import initializer
from google.cloud.aiplatform import models
from google.cloud.aiplatform import utils
from google.cloud.aiplatform_v1beta1.services.endpoint_service import (
client as endpoint_service_client_v1beta1,
)
from google.cloud.aiplatform_v1beta1.services.job_service import (
client as job_service_client_v1beta1,
)
from google.cloud.aiplatform_v1beta1.services.model_service import (
client as model_service_client_v1beta1,
)
from google.cloud.aiplatform_v1beta1.types import (
batch_prediction_job as gca_batch_prediction_job_v1beta1,
env_var as gca_env_var_v1beta1,
explanation as gca_explanation_v1beta1,
io as gca_io_v1beta1,
model as gca_model_v1beta1,
endpoint as gca_endpoint_v1beta1,
machine_resources as gca_machine_resources_v1beta1,
model_service as gca_model_service_v1beta1,
endpoint_service as gca_endpoint_service_v1beta1,
encryption_spec as gca_encryption_spec_v1beta1,
)
from google.cloud.aiplatform_v1.services.endpoint_service import (
client as endpoint_service_client,
)
from google.cloud.aiplatform_v1.services.job_service import client as job_service_client
from google.cloud.aiplatform_v1.services.model_service import (
client as model_service_client,
)
from google.cloud.aiplatform.compat.services import pipeline_service_client
from google.cloud.aiplatform_v1.types import (
batch_prediction_job as gca_batch_prediction_job,
io as gca_io,
job_state as gca_job_state,
model as gca_model,
endpoint as gca_endpoint,
machine_resources as gca_machine_resources,
model_service as gca_model_service,
endpoint_service as gca_endpoint_service,
encryption_spec as gca_encryption_spec,
)
from test_endpoints import create_endpoint_mock # noqa: F401
_TEST_PROJECT = "test-project"
_TEST_PROJECT_2 = "test-project-2"
_TEST_LOCATION = "us-central1"
_TEST_LOCATION_2 = "europe-west4"
_TEST_PARENT = f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}"
_TEST_MODEL_NAME = "test-model"
_TEST_ARTIFACT_URI = "gs://test/artifact/uri"
_TEST_SERVING_CONTAINER_IMAGE = "gcr.io/test-serving/container:image"
_TEST_SERVING_CONTAINER_PREDICTION_ROUTE = "predict"
_TEST_SERVING_CONTAINER_HEALTH_ROUTE = "metadata"
_TEST_DESCRIPTION = "test description"
_TEST_SERVING_CONTAINER_COMMAND = ["python3", "run_my_model.py"]
_TEST_SERVING_CONTAINER_ARGS = ["--test", "arg"]
_TEST_SERVING_CONTAINER_ENVIRONMENT_VARIABLES = {
"learning_rate": 0.01,
"loss_fn": "mse",
}
_TEST_SERVING_CONTAINER_PORTS = [8888, 10000]
_TEST_ID = "1028944691210842416"
_TEST_LABEL = {"team": "experimentation", "trial_id": "x435"}
_TEST_MACHINE_TYPE = "n1-standard-4"
_TEST_ACCELERATOR_TYPE = "NVIDIA_TESLA_P100"
_TEST_ACCELERATOR_COUNT = 2
_TEST_STARTING_REPLICA_COUNT = 2
_TEST_MAX_REPLICA_COUNT = 12
_TEST_PIPELINE_RESOURCE_NAME = (
"projects/my-project/locations/us-central1/trainingPipeline/12345"
)
_TEST_BATCH_PREDICTION_GCS_SOURCE = "gs://example-bucket/folder/instance.jsonl"
_TEST_BATCH_PREDICTION_GCS_SOURCE_LIST = [
"gs://example-bucket/folder/instance1.jsonl",
"gs://example-bucket/folder/instance2.jsonl",
]
_TEST_BATCH_PREDICTION_GCS_DEST_PREFIX = "gs://example-bucket/folder/output"
_TEST_BATCH_PREDICTION_BQ_PREFIX = "ucaip-sample-tests"
_TEST_BATCH_PREDICTION_BQ_DEST_PREFIX_WITH_PROTOCOL = (
f"bq://{_TEST_BATCH_PREDICTION_BQ_PREFIX}"
)
_TEST_BATCH_PREDICTION_DISPLAY_NAME = "test-batch-prediction-job"
_TEST_BATCH_PREDICTION_JOB_NAME = job_service_client.JobServiceClient.batch_prediction_job_path(
project=_TEST_PROJECT, location=_TEST_LOCATION, batch_prediction_job=_TEST_ID
)
_TEST_INSTANCE_SCHEMA_URI = "gs://test/schema/instance.yaml"
_TEST_PARAMETERS_SCHEMA_URI = "gs://test/schema/parameters.yaml"
_TEST_PREDICTION_SCHEMA_URI = "gs://test/schema/predictions.yaml"
_TEST_CREDENTIALS = mock.Mock(spec=auth_credentials.AnonymousCredentials())
_TEST_SERVICE_ACCOUNT = "vinnys@my-project.iam.gserviceaccount.com"
_TEST_EXPLANATION_METADATA = aiplatform.explain.ExplanationMetadata(
inputs={
"features": {
"input_tensor_name": "dense_input",
"encoding": "BAG_OF_FEATURES",
"modality": "numeric",
"index_feature_mapping": ["abc", "def", "ghj"],
}
},
outputs={"medv": {"output_tensor_name": "dense_2"}},
)
_TEST_EXPLANATION_PARAMETERS = aiplatform.explain.ExplanationParameters(
{"sampled_shapley_attribution": {"path_count": 10}}
)
# CMEK encryption
_TEST_ENCRYPTION_KEY_NAME = "key_1234"
_TEST_ENCRYPTION_SPEC = gca_encryption_spec.EncryptionSpec(
kms_key_name=_TEST_ENCRYPTION_KEY_NAME
)
_TEST_ENCRYPTION_SPEC_V1BETA1 = gca_encryption_spec_v1beta1.EncryptionSpec(
kms_key_name=_TEST_ENCRYPTION_KEY_NAME
)
_TEST_MODEL_RESOURCE_NAME = model_service_client.ModelServiceClient.model_path(
_TEST_PROJECT, _TEST_LOCATION, _TEST_ID
)
_TEST_MODEL_RESOURCE_NAME_CUSTOM_PROJECT = model_service_client.ModelServiceClient.model_path(
_TEST_PROJECT_2, _TEST_LOCATION, _TEST_ID
)
_TEST_MODEL_RESOURCE_NAME_CUSTOM_LOCATION = model_service_client.ModelServiceClient.model_path(
_TEST_PROJECT, _TEST_LOCATION_2, _TEST_ID
)
_TEST_OUTPUT_DIR = "gs://my-output-bucket"
_TEST_CONTAINER_REGISTRY_DESTINATION = (
"us-central1-docker.pkg.dev/projectId/repoName/imageName"
)
_TEST_EXPORT_FORMAT_ID_IMAGE = "custom-trained"
_TEST_EXPORT_FORMAT_ID_ARTIFACT = "tf-saved-model"
_TEST_SUPPORTED_EXPORT_FORMATS_IMAGE = [
gca_model.Model.ExportFormat(
id=_TEST_EXPORT_FORMAT_ID_IMAGE,
exportable_contents=[gca_model.Model.ExportFormat.ExportableContent.IMAGE],
)
]
_TEST_SUPPORTED_EXPORT_FORMATS_ARTIFACT = [
gca_model.Model.ExportFormat(
id=_TEST_EXPORT_FORMAT_ID_ARTIFACT,
exportable_contents=[gca_model.Model.ExportFormat.ExportableContent.ARTIFACT],
)
]
_TEST_SUPPORTED_EXPORT_FORMATS_BOTH = [
gca_model.Model.ExportFormat(
id=_TEST_EXPORT_FORMAT_ID_ARTIFACT,
exportable_contents=[
gca_model.Model.ExportFormat.ExportableContent.ARTIFACT,
gca_model.Model.ExportFormat.ExportableContent.IMAGE,
],
)
]
_TEST_SUPPORTED_EXPORT_FORMATS_UNSUPPORTED = []
_TEST_CONTAINER_REGISTRY_DESTINATION
@pytest.fixture
def get_endpoint_mock():
with mock.patch.object(
endpoint_service_client.EndpointServiceClient, "get_endpoint"
) as get_endpoint_mock:
test_endpoint_resource_name = endpoint_service_client.EndpointServiceClient.endpoint_path(
_TEST_PROJECT, _TEST_LOCATION, _TEST_ID
)
get_endpoint_mock.return_value = gca_endpoint.Endpoint(
display_name=_TEST_MODEL_NAME, name=test_endpoint_resource_name,
)
yield get_endpoint_mock
@pytest.fixture
def get_model_mock():
with mock.patch.object(
model_service_client.ModelServiceClient, "get_model"
) as get_model_mock:
get_model_mock.return_value = gca_model.Model(
display_name=_TEST_MODEL_NAME, name=_TEST_MODEL_RESOURCE_NAME,
)
yield get_model_mock
@pytest.fixture
def get_model_with_explanations_mock():
with mock.patch.object(
model_service_client_v1beta1.ModelServiceClient, "get_model"
) as get_model_mock:
get_model_mock.return_value = gca_model_v1beta1.Model(
display_name=_TEST_MODEL_NAME, name=_TEST_MODEL_RESOURCE_NAME,
)
yield get_model_mock
@pytest.fixture
def get_model_with_custom_location_mock():
with mock.patch.object(
model_service_client.ModelServiceClient, "get_model"
) as get_model_mock:
get_model_mock.return_value = gca_model.Model(
display_name=_TEST_MODEL_NAME,
name=_TEST_MODEL_RESOURCE_NAME_CUSTOM_LOCATION,
)
yield get_model_mock
@pytest.fixture
def get_model_with_custom_project_mock():
with mock.patch.object(
model_service_client.ModelServiceClient, "get_model"
) as get_model_mock:
get_model_mock.return_value = gca_model.Model(
display_name=_TEST_MODEL_NAME,
name=_TEST_MODEL_RESOURCE_NAME_CUSTOM_PROJECT,
artifact_uri=_TEST_ARTIFACT_URI,
description=_TEST_DESCRIPTION,
)
yield get_model_mock
@pytest.fixture
def get_model_with_training_job():
with mock.patch.object(
model_service_client.ModelServiceClient, "get_model"
) as get_model_mock:
get_model_mock.return_value = gca_model.Model(
display_name=_TEST_MODEL_NAME,
name=_TEST_MODEL_RESOURCE_NAME_CUSTOM_PROJECT,
training_pipeline=_TEST_PIPELINE_RESOURCE_NAME,
)
yield get_model_mock
@pytest.fixture
def get_model_with_supported_export_formats_image():
with mock.patch.object(
model_service_client.ModelServiceClient, "get_model"
) as get_model_mock:
get_model_mock.return_value = gca_model.Model(
display_name=_TEST_MODEL_NAME,
name=_TEST_MODEL_RESOURCE_NAME,
supported_export_formats=_TEST_SUPPORTED_EXPORT_FORMATS_IMAGE,
)
yield get_model_mock
@pytest.fixture
def get_model_with_supported_export_formats_artifact():
with mock.patch.object(
model_service_client.ModelServiceClient, "get_model"
) as get_model_mock:
get_model_mock.return_value = gca_model.Model(
display_name=_TEST_MODEL_NAME,
name=_TEST_MODEL_RESOURCE_NAME,
supported_export_formats=_TEST_SUPPORTED_EXPORT_FORMATS_ARTIFACT,
)
yield get_model_mock
@pytest.fixture
def get_model_with_both_supported_export_formats():
with mock.patch.object(
model_service_client.ModelServiceClient, "get_model"
) as get_model_mock:
get_model_mock.return_value = gca_model.Model(
display_name=_TEST_MODEL_NAME,
name=_TEST_MODEL_RESOURCE_NAME,
supported_export_formats=_TEST_SUPPORTED_EXPORT_FORMATS_BOTH,
)
yield get_model_mock
@pytest.fixture
def get_model_with_unsupported_export_formats():
with mock.patch.object(
model_service_client.ModelServiceClient, "get_model"
) as get_model_mock:
get_model_mock.return_value = gca_model.Model(
display_name=_TEST_MODEL_NAME,
name=_TEST_MODEL_RESOURCE_NAME,
supported_export_formats=_TEST_SUPPORTED_EXPORT_FORMATS_UNSUPPORTED,
)
yield get_model_mock
@pytest.fixture
def upload_model_mock():
with mock.patch.object(
model_service_client.ModelServiceClient, "upload_model"
) as upload_model_mock:
mock_lro = mock.Mock(ga_operation.Operation)
mock_lro.result.return_value = gca_model_service.UploadModelResponse(
model=_TEST_MODEL_RESOURCE_NAME
)
upload_model_mock.return_value = mock_lro
yield upload_model_mock
@pytest.fixture
def upload_model_with_explanations_mock():
with mock.patch.object(
model_service_client_v1beta1.ModelServiceClient, "upload_model"
) as upload_model_mock:
mock_lro = mock.Mock(ga_operation.Operation)
mock_lro.result.return_value = gca_model_service_v1beta1.UploadModelResponse(
model=_TEST_MODEL_RESOURCE_NAME
)
upload_model_mock.return_value = mock_lro
yield upload_model_mock
@pytest.fixture
def upload_model_with_custom_project_mock():
with mock.patch.object(
model_service_client.ModelServiceClient, "upload_model"
) as upload_model_mock:
mock_lro = mock.Mock(ga_operation.Operation)
mock_lro.result.return_value = gca_model_service.UploadModelResponse(
model=_TEST_MODEL_RESOURCE_NAME_CUSTOM_PROJECT
)
upload_model_mock.return_value = mock_lro
yield upload_model_mock
@pytest.fixture
def upload_model_with_custom_location_mock():
with mock.patch.object(
model_service_client.ModelServiceClient, "upload_model"
) as upload_model_mock:
mock_lro = mock.Mock(ga_operation.Operation)
mock_lro.result.return_value = gca_model_service.UploadModelResponse(
model=_TEST_MODEL_RESOURCE_NAME_CUSTOM_LOCATION
)
upload_model_mock.return_value = mock_lro
yield upload_model_mock
@pytest.fixture
def export_model_mock():
with mock.patch.object(
model_service_client.ModelServiceClient, "export_model"
) as export_model_mock:
export_model_lro_mock = mock.Mock(ga_operation.Operation)
export_model_lro_mock.metadata = gca_model_service.ExportModelOperationMetadata(
output_info=gca_model_service.ExportModelOperationMetadata.OutputInfo(
artifact_output_uri=_TEST_OUTPUT_DIR
)
)
export_model_lro_mock.result.return_value = None
export_model_mock.return_value = export_model_lro_mock
yield export_model_mock
@pytest.fixture
def delete_model_mock():
with mock.patch.object(
model_service_client.ModelServiceClient, "delete_model"
) as delete_model_mock:
delete_model_lro_mock = mock.Mock(ga_operation.Operation)
delete_model_lro_mock.result.return_value = (
gca_model_service.DeleteModelRequest()
)
delete_model_mock.return_value = delete_model_lro_mock
yield delete_model_mock
@pytest.fixture
def deploy_model_mock():
with mock.patch.object(
endpoint_service_client.EndpointServiceClient, "deploy_model"
) as deploy_model_mock:
deployed_model = gca_endpoint.DeployedModel(
model=_TEST_MODEL_RESOURCE_NAME, display_name=_TEST_MODEL_NAME,
)
deploy_model_lro_mock = mock.Mock(ga_operation.Operation)
deploy_model_lro_mock.result.return_value = gca_endpoint_service.DeployModelResponse(
deployed_model=deployed_model,
)
deploy_model_mock.return_value = deploy_model_lro_mock
yield deploy_model_mock
@pytest.fixture
def deploy_model_with_explanations_mock():
with mock.patch.object(
endpoint_service_client_v1beta1.EndpointServiceClient, "deploy_model"
) as deploy_model_mock:
deployed_model = gca_endpoint_v1beta1.DeployedModel(
model=_TEST_MODEL_RESOURCE_NAME, display_name=_TEST_MODEL_NAME,
)
deploy_model_lro_mock = mock.Mock(ga_operation.Operation)
deploy_model_lro_mock.result.return_value = gca_endpoint_service_v1beta1.DeployModelResponse(
deployed_model=deployed_model,
)
deploy_model_mock.return_value = deploy_model_lro_mock
yield deploy_model_mock
@pytest.fixture
def get_batch_prediction_job_mock():
with mock.patch.object(
job_service_client.JobServiceClient, "get_batch_prediction_job"
) as get_batch_prediction_job_mock:
batch_prediction_mock = mock.Mock(
spec=gca_batch_prediction_job.BatchPredictionJob
)
batch_prediction_mock.state = gca_job_state.JobState.JOB_STATE_SUCCEEDED
batch_prediction_mock.name = _TEST_BATCH_PREDICTION_JOB_NAME
get_batch_prediction_job_mock.return_value = batch_prediction_mock
yield get_batch_prediction_job_mock
@pytest.fixture
def create_batch_prediction_job_mock():
with mock.patch.object(
job_service_client.JobServiceClient, "create_batch_prediction_job"
) as create_batch_prediction_job_mock:
batch_prediction_job_mock = mock.Mock(
spec=gca_batch_prediction_job.BatchPredictionJob
)
batch_prediction_job_mock.name = _TEST_BATCH_PREDICTION_JOB_NAME
create_batch_prediction_job_mock.return_value = batch_prediction_job_mock
yield create_batch_prediction_job_mock
@pytest.fixture
def create_batch_prediction_job_with_explanations_mock():
with mock.patch.object(
job_service_client_v1beta1.JobServiceClient, "create_batch_prediction_job"
) as create_batch_prediction_job_mock:
batch_prediction_job_mock = mock.Mock(
spec=gca_batch_prediction_job_v1beta1.BatchPredictionJob
)
batch_prediction_job_mock.name = _TEST_BATCH_PREDICTION_JOB_NAME
create_batch_prediction_job_mock.return_value = batch_prediction_job_mock
yield create_batch_prediction_job_mock
@pytest.fixture
def get_training_job_non_existent_mock():
with patch.object(
pipeline_service_client.PipelineServiceClient, "get_training_pipeline"
) as get_training_job_non_existent_mock:
get_training_job_non_existent_mock.side_effect = api_exceptions.NotFound("404")
yield get_training_job_non_existent_mock
@pytest.fixture
def create_client_mock():
with mock.patch.object(
initializer.global_config, "create_client"
) as create_client_mock:
api_client_mock = mock.Mock(spec=model_service_client.ModelServiceClient)
create_client_mock.return_value = api_client_mock
yield create_client_mock
class TestModel:
def setup_method(self):
importlib.reload(initializer)
importlib.reload(aiplatform)
aiplatform.init(project=_TEST_PROJECT, location=_TEST_LOCATION)
def teardown_method(self):
initializer.global_pool.shutdown(wait=True)
def test_constructor_creates_client(self, create_client_mock):
aiplatform.init(
project=_TEST_PROJECT,
location=_TEST_LOCATION,
credentials=_TEST_CREDENTIALS,
)
models.Model(_TEST_ID)
create_client_mock.assert_called_once_with(
client_class=utils.ModelClientWithOverride,
credentials=initializer.global_config.credentials,
location_override=_TEST_LOCATION,
prediction_client=False,
)
def test_constructor_create_client_with_custom_location(self, create_client_mock):
aiplatform.init(
project=_TEST_PROJECT,
location=_TEST_LOCATION,
credentials=_TEST_CREDENTIALS,
)
models.Model(_TEST_ID, location=_TEST_LOCATION_2)
create_client_mock.assert_called_once_with(
client_class=utils.ModelClientWithOverride,
credentials=initializer.global_config.credentials,
location_override=_TEST_LOCATION_2,
prediction_client=False,
)
def test_constructor_creates_client_with_custom_credentials(
self, create_client_mock
):
creds = auth_credentials.AnonymousCredentials()
models.Model(_TEST_ID, credentials=creds)
create_client_mock.assert_called_once_with(
client_class=utils.ModelClientWithOverride,
credentials=creds,
location_override=_TEST_LOCATION,
prediction_client=False,
)
def test_constructor_gets_model(self, get_model_mock):
models.Model(_TEST_ID)
get_model_mock.assert_called_once_with(name=_TEST_MODEL_RESOURCE_NAME)
def test_constructor_gets_model_with_custom_project(self, get_model_mock):
models.Model(_TEST_ID, project=_TEST_PROJECT_2)
test_model_resource_name = model_service_client.ModelServiceClient.model_path(
_TEST_PROJECT_2, _TEST_LOCATION, _TEST_ID
)
get_model_mock.assert_called_once_with(name=test_model_resource_name)
def test_constructor_gets_model_with_custom_location(self, get_model_mock):
models.Model(_TEST_ID, location=_TEST_LOCATION_2)
test_model_resource_name = model_service_client.ModelServiceClient.model_path(
_TEST_PROJECT, _TEST_LOCATION_2, _TEST_ID
)
get_model_mock.assert_called_once_with(name=test_model_resource_name)
@pytest.mark.parametrize("sync", [True, False])
def test_upload_uploads_and_gets_model(
self, upload_model_mock, get_model_mock, sync
):
my_model = models.Model.upload(
display_name=_TEST_MODEL_NAME,
serving_container_image_uri=_TEST_SERVING_CONTAINER_IMAGE,
serving_container_predict_route=_TEST_SERVING_CONTAINER_PREDICTION_ROUTE,
serving_container_health_route=_TEST_SERVING_CONTAINER_HEALTH_ROUTE,
sync=sync,
)
if not sync:
my_model.wait()
container_spec = gca_model.ModelContainerSpec(
image_uri=_TEST_SERVING_CONTAINER_IMAGE,
predict_route=_TEST_SERVING_CONTAINER_PREDICTION_ROUTE,
health_route=_TEST_SERVING_CONTAINER_HEALTH_ROUTE,
)
managed_model = gca_model.Model(
display_name=_TEST_MODEL_NAME, container_spec=container_spec,
)
upload_model_mock.assert_called_once_with(
parent=initializer.global_config.common_location_path(),
model=managed_model,
)
get_model_mock.assert_called_once_with(name=_TEST_MODEL_RESOURCE_NAME)
@pytest.mark.parametrize("sync", [True, False])
def test_upload_uploads_and_gets_model_with_labels(
self, upload_model_mock, get_model_mock, sync
):
my_model = models.Model.upload(
display_name=_TEST_MODEL_NAME,
serving_container_image_uri=_TEST_SERVING_CONTAINER_IMAGE,
serving_container_predict_route=_TEST_SERVING_CONTAINER_PREDICTION_ROUTE,
serving_container_health_route=_TEST_SERVING_CONTAINER_HEALTH_ROUTE,
labels=_TEST_LABEL,
sync=sync,
)
if not sync:
my_model.wait()
container_spec = gca_model.ModelContainerSpec(
image_uri=_TEST_SERVING_CONTAINER_IMAGE,
predict_route=_TEST_SERVING_CONTAINER_PREDICTION_ROUTE,
health_route=_TEST_SERVING_CONTAINER_HEALTH_ROUTE,
)
managed_model = gca_model.Model(
display_name=_TEST_MODEL_NAME,
container_spec=container_spec,
labels=_TEST_LABEL,
)
upload_model_mock.assert_called_once_with(
parent=initializer.global_config.common_location_path(),
model=managed_model,
)
get_model_mock.assert_called_once_with(name=_TEST_MODEL_RESOURCE_NAME)
def test_upload_raises_with_impartial_explanation_spec(self):
with pytest.raises(ValueError) as e:
models.Model.upload(
display_name=_TEST_MODEL_NAME,
artifact_uri=_TEST_ARTIFACT_URI,
serving_container_image_uri=_TEST_SERVING_CONTAINER_IMAGE,
explanation_parameters=_TEST_EXPLANATION_PARAMETERS
                # Missing the required explanation_metadata field
)
assert e.match(regexp=r"`explanation_parameters` should be specified or None.")
@pytest.mark.parametrize("sync", [True, False])
def test_upload_uploads_and_gets_model_with_all_args(
self, upload_model_with_explanations_mock, get_model_mock, sync
):
my_model = models.Model.upload(
display_name=_TEST_MODEL_NAME,
artifact_uri=_TEST_ARTIFACT_URI,
serving_container_image_uri=_TEST_SERVING_CONTAINER_IMAGE,
serving_container_predict_route=_TEST_SERVING_CONTAINER_PREDICTION_ROUTE,
serving_container_health_route=_TEST_SERVING_CONTAINER_HEALTH_ROUTE,
instance_schema_uri=_TEST_INSTANCE_SCHEMA_URI,
parameters_schema_uri=_TEST_PARAMETERS_SCHEMA_URI,
prediction_schema_uri=_TEST_PREDICTION_SCHEMA_URI,
description=_TEST_DESCRIPTION,
serving_container_command=_TEST_SERVING_CONTAINER_COMMAND,
serving_container_args=_TEST_SERVING_CONTAINER_ARGS,
serving_container_environment_variables=_TEST_SERVING_CONTAINER_ENVIRONMENT_VARIABLES,
serving_container_ports=_TEST_SERVING_CONTAINER_PORTS,
explanation_metadata=_TEST_EXPLANATION_METADATA,
explanation_parameters=_TEST_EXPLANATION_PARAMETERS,
labels=_TEST_LABEL,
sync=sync,
)
if not sync:
my_model.wait()
env = [
gca_env_var_v1beta1.EnvVar(name=str(key), value=str(value))
for key, value in _TEST_SERVING_CONTAINER_ENVIRONMENT_VARIABLES.items()
]
ports = [
gca_model_v1beta1.Port(container_port=port)
for port in _TEST_SERVING_CONTAINER_PORTS
]
container_spec = gca_model_v1beta1.ModelContainerSpec(
image_uri=_TEST_SERVING_CONTAINER_IMAGE,
predict_route=_TEST_SERVING_CONTAINER_PREDICTION_ROUTE,
health_route=_TEST_SERVING_CONTAINER_HEALTH_ROUTE,
command=_TEST_SERVING_CONTAINER_COMMAND,
args=_TEST_SERVING_CONTAINER_ARGS,
env=env,
ports=ports,
)
managed_model = gca_model_v1beta1.Model(
display_name=_TEST_MODEL_NAME,
description=_TEST_DESCRIPTION,
artifact_uri=_TEST_ARTIFACT_URI,
container_spec=container_spec,
predict_schemata=gca_model_v1beta1.PredictSchemata(
instance_schema_uri=_TEST_INSTANCE_SCHEMA_URI,
parameters_schema_uri=_TEST_PARAMETERS_SCHEMA_URI,
prediction_schema_uri=_TEST_PREDICTION_SCHEMA_URI,
),
explanation_spec=gca_model_v1beta1.explanation.ExplanationSpec(
metadata=_TEST_EXPLANATION_METADATA,
parameters=_TEST_EXPLANATION_PARAMETERS,
),
labels=_TEST_LABEL,
)
upload_model_with_explanations_mock.assert_called_once_with(
parent=initializer.global_config.common_location_path(),
model=managed_model,
)
get_model_mock.assert_called_once_with(name=_TEST_MODEL_RESOURCE_NAME)
@pytest.mark.usefixtures("get_model_with_custom_project_mock")
@pytest.mark.parametrize("sync", [True, False])
def test_upload_uploads_and_gets_model_with_custom_project(
self,
upload_model_with_custom_project_mock,
get_model_with_custom_project_mock,
sync,
):
test_model_resource_name = model_service_client.ModelServiceClient.model_path(
_TEST_PROJECT_2, _TEST_LOCATION, _TEST_ID
)
my_model = models.Model.upload(
display_name=_TEST_MODEL_NAME,
artifact_uri=_TEST_ARTIFACT_URI,
serving_container_image_uri=_TEST_SERVING_CONTAINER_IMAGE,
serving_container_predict_route=_TEST_SERVING_CONTAINER_PREDICTION_ROUTE,
serving_container_health_route=_TEST_SERVING_CONTAINER_HEALTH_ROUTE,
project=_TEST_PROJECT_2,
sync=sync,
)
if not sync:
my_model.wait()
container_spec = gca_model.ModelContainerSpec(
image_uri=_TEST_SERVING_CONTAINER_IMAGE,
predict_route=_TEST_SERVING_CONTAINER_PREDICTION_ROUTE,
health_route=_TEST_SERVING_CONTAINER_HEALTH_ROUTE,
)
managed_model = gca_model.Model(
display_name=_TEST_MODEL_NAME,
artifact_uri=_TEST_ARTIFACT_URI,
container_spec=container_spec,
)
upload_model_with_custom_project_mock.assert_called_once_with(
parent=f"projects/{_TEST_PROJECT_2}/locations/{_TEST_LOCATION}",
model=managed_model,
)
get_model_with_custom_project_mock.assert_called_once_with(
name=test_model_resource_name
)
assert my_model.uri == _TEST_ARTIFACT_URI
assert my_model.supported_export_formats == {}
assert my_model.supported_deployment_resources_types == []
assert my_model.supported_input_storage_formats == []
assert my_model.supported_output_storage_formats == []
assert my_model.description == _TEST_DESCRIPTION
@pytest.mark.usefixtures("get_model_with_custom_project_mock")
def test_accessing_properties_with_no_resource_raises(self,):
test_model_resource_name = model_service_client.ModelServiceClient.model_path(
_TEST_PROJECT_2, _TEST_LOCATION, _TEST_ID
)
my_model = models.Model(test_model_resource_name)
my_model._gca_resource = None
with pytest.raises(RuntimeError) as e:
my_model.uri
e.match(regexp=r"Model resource has not been created.")
with pytest.raises(RuntimeError) as e:
my_model.supported_export_formats
e.match(regexp=r"Model resource has not been created.")
with pytest.raises(RuntimeError) as e:
my_model.supported_deployment_resources_types
e.match(regexp=r"Model resource has not been created.")
with pytest.raises(RuntimeError) as e:
my_model.supported_input_storage_formats
e.match(regexp=r"Model resource has not been created.")
with pytest.raises(RuntimeError) as e:
my_model.supported_output_storage_formats
e.match(regexp=r"Model resource has not been created.")
with pytest.raises(RuntimeError) as e:
my_model.description
e.match(regexp=r"Model resource has not been created.")
@pytest.mark.usefixtures("get_model_with_custom_location_mock")
@pytest.mark.parametrize("sync", [True, False])
def test_upload_uploads_and_gets_model_with_custom_location(
self,
upload_model_with_custom_location_mock,
get_model_with_custom_location_mock,
sync,
):
test_model_resource_name = model_service_client.ModelServiceClient.model_path(
_TEST_PROJECT, _TEST_LOCATION_2, _TEST_ID
)
my_model = models.Model.upload(
display_name=_TEST_MODEL_NAME,
artifact_uri=_TEST_ARTIFACT_URI,
serving_container_image_uri=_TEST_SERVING_CONTAINER_IMAGE,
serving_container_predict_route=_TEST_SERVING_CONTAINER_PREDICTION_ROUTE,
serving_container_health_route=_TEST_SERVING_CONTAINER_HEALTH_ROUTE,
location=_TEST_LOCATION_2,
sync=sync,
)
if not sync:
my_model.wait()
container_spec = gca_model.ModelContainerSpec(
image_uri=_TEST_SERVING_CONTAINER_IMAGE,
predict_route=_TEST_SERVING_CONTAINER_PREDICTION_ROUTE,
health_route=_TEST_SERVING_CONTAINER_HEALTH_ROUTE,
)
managed_model = gca_model.Model(
display_name=_TEST_MODEL_NAME,
artifact_uri=_TEST_ARTIFACT_URI,
container_spec=container_spec,
)
upload_model_with_custom_location_mock.assert_called_once_with(
parent=f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION_2}",
model=managed_model,
)
get_model_with_custom_location_mock.assert_called_once_with(
name=test_model_resource_name
)
@pytest.mark.usefixtures("get_endpoint_mock", "get_model_mock")
@pytest.mark.parametrize("sync", [True, False])
def test_deploy(self, deploy_model_mock, sync):
test_model = models.Model(_TEST_ID)
test_endpoint = models.Endpoint(_TEST_ID)
assert test_model.deploy(test_endpoint, sync=sync,) == test_endpoint
if not sync:
test_endpoint.wait()
automatic_resources = gca_machine_resources.AutomaticResources(
min_replica_count=1, max_replica_count=1,
)
deployed_model = gca_endpoint.DeployedModel(
automatic_resources=automatic_resources,
model=test_model.resource_name,
display_name=None,
)
deploy_model_mock.assert_called_once_with(
endpoint=test_endpoint.resource_name,
deployed_model=deployed_model,
traffic_split={"0": 100},
metadata=(),
)
@pytest.mark.usefixtures(
"get_endpoint_mock", "get_model_mock", "create_endpoint_mock"
)
@pytest.mark.parametrize("sync", [True, False])
def test_deploy_no_endpoint(self, deploy_model_mock, sync):
test_model = models.Model(_TEST_ID)
test_endpoint = test_model.deploy(sync=sync)
if not sync:
test_endpoint.wait()
automatic_resources = gca_machine_resources.AutomaticResources(
min_replica_count=1, max_replica_count=1,
)
deployed_model = gca_endpoint.DeployedModel(
automatic_resources=automatic_resources,
model=test_model.resource_name,
display_name=None,
)
deploy_model_mock.assert_called_once_with(
endpoint=test_endpoint.resource_name,
deployed_model=deployed_model,
traffic_split={"0": 100},
metadata=(),
)
@pytest.mark.usefixtures(
"get_endpoint_mock", "get_model_mock", "create_endpoint_mock"
)
@pytest.mark.parametrize("sync", [True, False])
def test_deploy_no_endpoint_dedicated_resources(self, deploy_model_mock, sync):
test_model = models.Model(_TEST_ID)
test_endpoint = test_model.deploy(
machine_type=_TEST_MACHINE_TYPE,
accelerator_type=_TEST_ACCELERATOR_TYPE,
accelerator_count=_TEST_ACCELERATOR_COUNT,
service_account=_TEST_SERVICE_ACCOUNT,
sync=sync,
)
if not sync:
test_endpoint.wait()
expected_machine_spec = gca_machine_resources.MachineSpec(
machine_type=_TEST_MACHINE_TYPE,
accelerator_type=_TEST_ACCELERATOR_TYPE,
accelerator_count=_TEST_ACCELERATOR_COUNT,
)
expected_dedicated_resources = gca_machine_resources.DedicatedResources(
machine_spec=expected_machine_spec, min_replica_count=1, max_replica_count=1
)
expected_deployed_model = gca_endpoint.DeployedModel(
dedicated_resources=expected_dedicated_resources,
model=test_model.resource_name,
display_name=None,
service_account=_TEST_SERVICE_ACCOUNT,
)
deploy_model_mock.assert_called_once_with(
endpoint=test_endpoint.resource_name,
deployed_model=expected_deployed_model,
traffic_split={"0": 100},
metadata=(),
)
@pytest.mark.usefixtures(
"get_endpoint_mock", "get_model_mock", "create_endpoint_mock"
)
@pytest.mark.parametrize("sync", [True, False])
def test_deploy_no_endpoint_with_explanations(
self, deploy_model_with_explanations_mock, sync
):
test_model = models.Model(_TEST_ID)
test_endpoint = test_model.deploy(
machine_type=_TEST_MACHINE_TYPE,
accelerator_type=_TEST_ACCELERATOR_TYPE,
accelerator_count=_TEST_ACCELERATOR_COUNT,
explanation_metadata=_TEST_EXPLANATION_METADATA,
explanation_parameters=_TEST_EXPLANATION_PARAMETERS,
sync=sync,
)
if not sync:
test_endpoint.wait()
expected_machine_spec = gca_machine_resources_v1beta1.MachineSpec(
machine_type=_TEST_MACHINE_TYPE,
accelerator_type=_TEST_ACCELERATOR_TYPE,
accelerator_count=_TEST_ACCELERATOR_COUNT,
)
expected_dedicated_resources = gca_machine_resources_v1beta1.DedicatedResources(
machine_spec=expected_machine_spec, min_replica_count=1, max_replica_count=1
)
expected_deployed_model = gca_endpoint_v1beta1.DeployedModel(
dedicated_resources=expected_dedicated_resources,
model=test_model.resource_name,
display_name=None,
explanation_spec=gca_endpoint_v1beta1.explanation.ExplanationSpec(
metadata=_TEST_EXPLANATION_METADATA,
parameters=_TEST_EXPLANATION_PARAMETERS,
),
)
deploy_model_with_explanations_mock.assert_called_once_with(
endpoint=test_endpoint.resource_name,
deployed_model=expected_deployed_model,
traffic_split={"0": 100},
metadata=(),
)
@pytest.mark.usefixtures(
"get_endpoint_mock", "get_model_mock", "create_endpoint_mock"
)
def test_deploy_raises_with_impartial_explanation_spec(self):
test_model = models.Model(_TEST_ID)
with pytest.raises(ValueError) as e:
test_model.deploy(
machine_type=_TEST_MACHINE_TYPE,
accelerator_type=_TEST_ACCELERATOR_TYPE,
accelerator_count=_TEST_ACCELERATOR_COUNT,
explanation_metadata=_TEST_EXPLANATION_METADATA,
# Missing required `explanation_parameters` argument
)
assert e.match(regexp=r"`explanation_parameters` should be specified or None.")
@pytest.mark.parametrize("sync", [True, False])
@pytest.mark.usefixtures("get_model_mock", "get_batch_prediction_job_mock")
def test_init_aiplatform_with_encryption_key_name_and_batch_predict_gcs_source_and_dest(
self, create_batch_prediction_job_mock, sync
):
aiplatform.init(
project=_TEST_PROJECT,
location=_TEST_LOCATION,
encryption_spec_key_name=_TEST_ENCRYPTION_KEY_NAME,
)
test_model = models.Model(_TEST_ID)
# Make SDK batch_predict method call
batch_prediction_job = test_model.batch_predict(
job_display_name=_TEST_BATCH_PREDICTION_DISPLAY_NAME,
gcs_source=_TEST_BATCH_PREDICTION_GCS_SOURCE,
gcs_destination_prefix=_TEST_BATCH_PREDICTION_GCS_DEST_PREFIX,
sync=sync,
)
if not sync:
batch_prediction_job.wait()
# Construct expected request
expected_gapic_batch_prediction_job = gca_batch_prediction_job.BatchPredictionJob(
display_name=_TEST_BATCH_PREDICTION_DISPLAY_NAME,
model=model_service_client.ModelServiceClient.model_path(
_TEST_PROJECT, _TEST_LOCATION, _TEST_ID
),
input_config=gca_batch_prediction_job.BatchPredictionJob.InputConfig(
instances_format="jsonl",
gcs_source=gca_io.GcsSource(uris=[_TEST_BATCH_PREDICTION_GCS_SOURCE]),
),
output_config=gca_batch_prediction_job.BatchPredictionJob.OutputConfig(
gcs_destination=gca_io.GcsDestination(
output_uri_prefix=_TEST_BATCH_PREDICTION_GCS_DEST_PREFIX
),
predictions_format="jsonl",
),
encryption_spec=_TEST_ENCRYPTION_SPEC,
)
create_batch_prediction_job_mock.assert_called_once_with(
parent=_TEST_PARENT,
batch_prediction_job=expected_gapic_batch_prediction_job,
)
@pytest.mark.parametrize("sync", [True, False])
@pytest.mark.usefixtures("get_model_mock", "get_batch_prediction_job_mock")
def test_batch_predict_gcs_source_and_dest(
self, create_batch_prediction_job_mock, sync
):
test_model = models.Model(_TEST_ID)
# Make SDK batch_predict method call
batch_prediction_job = test_model.batch_predict(
job_display_name=_TEST_BATCH_PREDICTION_DISPLAY_NAME,
gcs_source=_TEST_BATCH_PREDICTION_GCS_SOURCE,
gcs_destination_prefix=_TEST_BATCH_PREDICTION_GCS_DEST_PREFIX,
sync=sync,
)
if not sync:
batch_prediction_job.wait()
# Construct expected request
expected_gapic_batch_prediction_job = gca_batch_prediction_job.BatchPredictionJob(
display_name=_TEST_BATCH_PREDICTION_DISPLAY_NAME,
model=model_service_client.ModelServiceClient.model_path(
_TEST_PROJECT, _TEST_LOCATION, _TEST_ID
),
input_config=gca_batch_prediction_job.BatchPredictionJob.InputConfig(
instances_format="jsonl",
gcs_source=gca_io.GcsSource(uris=[_TEST_BATCH_PREDICTION_GCS_SOURCE]),
),
output_config=gca_batch_prediction_job.BatchPredictionJob.OutputConfig(
gcs_destination=gca_io.GcsDestination(
output_uri_prefix=_TEST_BATCH_PREDICTION_GCS_DEST_PREFIX
),
predictions_format="jsonl",
),
)
create_batch_prediction_job_mock.assert_called_once_with(
parent=_TEST_PARENT,
batch_prediction_job=expected_gapic_batch_prediction_job,
)
@pytest.mark.parametrize("sync", [True, False])
@pytest.mark.usefixtures("get_model_mock", "get_batch_prediction_job_mock")
def test_batch_predict_gcs_source_bq_dest(
self, create_batch_prediction_job_mock, sync
):
test_model = models.Model(_TEST_ID)
# Make SDK batch_predict method call
batch_prediction_job = test_model.batch_predict(
job_display_name=_TEST_BATCH_PREDICTION_DISPLAY_NAME,
gcs_source=_TEST_BATCH_PREDICTION_GCS_SOURCE,
bigquery_destination_prefix=_TEST_BATCH_PREDICTION_BQ_PREFIX,
sync=sync,
)
if not sync:
batch_prediction_job.wait()
# Construct expected request
expected_gapic_batch_prediction_job = gca_batch_prediction_job.BatchPredictionJob(
display_name=_TEST_BATCH_PREDICTION_DISPLAY_NAME,
model=model_service_client.ModelServiceClient.model_path(
_TEST_PROJECT, _TEST_LOCATION, _TEST_ID
),
input_config=gca_batch_prediction_job.BatchPredictionJob.InputConfig(
instances_format="jsonl",
gcs_source=gca_io.GcsSource(uris=[_TEST_BATCH_PREDICTION_GCS_SOURCE]),
),
output_config=gca_batch_prediction_job.BatchPredictionJob.OutputConfig(
bigquery_destination=gca_io.BigQueryDestination(
output_uri=_TEST_BATCH_PREDICTION_BQ_DEST_PREFIX_WITH_PROTOCOL
),
predictions_format="bigquery",
),
)
create_batch_prediction_job_mock.assert_called_once_with(
parent=_TEST_PARENT,
batch_prediction_job=expected_gapic_batch_prediction_job,
)
@pytest.mark.parametrize("sync", [True, False])
@pytest.mark.usefixtures("get_model_mock", "get_batch_prediction_job_mock")
def test_batch_predict_with_all_args(
self, create_batch_prediction_job_with_explanations_mock, sync
):
test_model = models.Model(_TEST_ID)
creds = auth_credentials.AnonymousCredentials()
# Make SDK batch_predict method call passing all arguments
batch_prediction_job = test_model.batch_predict(
job_display_name=_TEST_BATCH_PREDICTION_DISPLAY_NAME,
gcs_source=_TEST_BATCH_PREDICTION_GCS_SOURCE,
gcs_destination_prefix=_TEST_BATCH_PREDICTION_GCS_DEST_PREFIX,
predictions_format="csv",
model_parameters={},
machine_type=_TEST_MACHINE_TYPE,
accelerator_type=_TEST_ACCELERATOR_TYPE,
accelerator_count=_TEST_ACCELERATOR_COUNT,
starting_replica_count=_TEST_STARTING_REPLICA_COUNT,
max_replica_count=_TEST_MAX_REPLICA_COUNT,
generate_explanation=True,
explanation_metadata=_TEST_EXPLANATION_METADATA,
explanation_parameters=_TEST_EXPLANATION_PARAMETERS,
labels=_TEST_LABEL,
credentials=creds,
encryption_spec_key_name=_TEST_ENCRYPTION_KEY_NAME,
sync=sync,
)
if not sync:
batch_prediction_job.wait()
# Construct expected request
expected_gapic_batch_prediction_job = gca_batch_prediction_job_v1beta1.BatchPredictionJob(
display_name=_TEST_BATCH_PREDICTION_DISPLAY_NAME,
model=model_service_client_v1beta1.ModelServiceClient.model_path(
_TEST_PROJECT, _TEST_LOCATION, _TEST_ID
),
input_config=gca_batch_prediction_job_v1beta1.BatchPredictionJob.InputConfig(
instances_format="jsonl",
gcs_source=gca_io_v1beta1.GcsSource(
uris=[_TEST_BATCH_PREDICTION_GCS_SOURCE]
),
),
output_config=gca_batch_prediction_job_v1beta1.BatchPredictionJob.OutputConfig(
gcs_destination=gca_io_v1beta1.GcsDestination(
output_uri_prefix=_TEST_BATCH_PREDICTION_GCS_DEST_PREFIX
),
predictions_format="csv",
),
dedicated_resources=gca_machine_resources_v1beta1.BatchDedicatedResources(
machine_spec=gca_machine_resources_v1beta1.MachineSpec(
machine_type=_TEST_MACHINE_TYPE,
accelerator_type=_TEST_ACCELERATOR_TYPE,
accelerator_count=_TEST_ACCELERATOR_COUNT,
),
starting_replica_count=_TEST_STARTING_REPLICA_COUNT,
max_replica_count=_TEST_MAX_REPLICA_COUNT,
),
generate_explanation=True,
explanation_spec=gca_explanation_v1beta1.ExplanationSpec(
metadata=_TEST_EXPLANATION_METADATA,
parameters=_TEST_EXPLANATION_PARAMETERS,
),
labels=_TEST_LABEL,
encryption_spec=_TEST_ENCRYPTION_SPEC_V1BETA1,
)
create_batch_prediction_job_with_explanations_mock.assert_called_once_with(
parent=f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}",
batch_prediction_job=expected_gapic_batch_prediction_job,
)
@pytest.mark.usefixtures("get_model_mock", "get_batch_prediction_job_mock")
def test_batch_predict_no_source(self, create_batch_prediction_job_mock):
test_model = models.Model(_TEST_ID)
# Make SDK batch_predict method call without source
with pytest.raises(ValueError) as e:
test_model.batch_predict(
job_display_name=_TEST_BATCH_PREDICTION_DISPLAY_NAME,
bigquery_destination_prefix=_TEST_BATCH_PREDICTION_BQ_PREFIX,
)
assert e.match(regexp=r"source")
@pytest.mark.usefixtures("get_model_mock", "get_batch_prediction_job_mock")
def test_batch_predict_two_sources(self, create_batch_prediction_job_mock):
test_model = models.Model(_TEST_ID)
# Make SDK batch_predict method call with two sources
with pytest.raises(ValueError) as e:
test_model.batch_predict(
job_display_name=_TEST_BATCH_PREDICTION_DISPLAY_NAME,
gcs_source=_TEST_BATCH_PREDICTION_GCS_SOURCE,
bigquery_source=_TEST_BATCH_PREDICTION_BQ_PREFIX,
bigquery_destination_prefix=_TEST_BATCH_PREDICTION_BQ_PREFIX,
)
assert e.match(regexp=r"source")
@pytest.mark.usefixtures("get_model_mock", "get_batch_prediction_job_mock")
def test_batch_predict_no_destination(self):
test_model = models.Model(_TEST_ID)
# Make SDK batch_predict method call without destination
with pytest.raises(ValueError) as e:
test_model.batch_predict(
job_display_name=_TEST_BATCH_PREDICTION_DISPLAY_NAME,
gcs_source=_TEST_BATCH_PREDICTION_GCS_SOURCE,
)
assert e.match(regexp=r"destination")
@pytest.mark.usefixtures("get_model_mock", "get_batch_prediction_job_mock")
def test_batch_predict_wrong_instance_format(self):
test_model = models.Model(_TEST_ID)
# Make SDK batch_predict method call
with pytest.raises(ValueError) as e:
test_model.batch_predict(
job_display_name=_TEST_BATCH_PREDICTION_DISPLAY_NAME,
gcs_source=_TEST_BATCH_PREDICTION_GCS_SOURCE,
instances_format="wrong",
bigquery_destination_prefix=_TEST_BATCH_PREDICTION_BQ_PREFIX,
)
assert e.match(regexp=r"accepted instances format")
@pytest.mark.usefixtures("get_model_mock", "get_batch_prediction_job_mock")
def test_batch_predict_wrong_prediction_format(self):
test_model = models.Model(_TEST_ID)
# Make SDK batch_predict method call
with pytest.raises(ValueError) as e:
test_model.batch_predict(
job_display_name=_TEST_BATCH_PREDICTION_DISPLAY_NAME,
gcs_source=_TEST_BATCH_PREDICTION_GCS_SOURCE,
predictions_format="wrong",
bigquery_destination_prefix=_TEST_BATCH_PREDICTION_BQ_PREFIX,
)
assert e.match(regexp=r"accepted prediction format")
@pytest.mark.usefixtures("get_model_mock")
@pytest.mark.parametrize("sync", [True, False])
def test_delete_model(self, delete_model_mock, sync):
test_model = models.Model(_TEST_ID)
test_model.delete(sync=sync)
if not sync:
test_model.wait()
delete_model_mock.assert_called_once_with(name=test_model.resource_name)
@pytest.mark.usefixtures("get_model_mock")
def test_print_model(self):
test_model = models.Model(_TEST_ID)
assert (
repr(test_model)
== f"{object.__repr__(test_model)} \nresource name: {test_model.resource_name}"
)
@pytest.mark.usefixtures("get_model_mock")
def test_print_model_if_waiting(self):
test_model = models.Model(_TEST_ID)
test_model._gca_resource = None
test_model._latest_future = futures.Future()
assert (
repr(test_model)
== f"{object.__repr__(test_model)} is waiting for upstream dependencies to complete."
)
@pytest.mark.usefixtures("get_model_mock")
def test_print_model_if_exception(self):
test_model = models.Model(_TEST_ID)
test_model._gca_resource = None
mock_exception = Exception("mock exception")
test_model._exception = mock_exception
assert (
repr(test_model)
== f"{object.__repr__(test_model)} failed with {str(mock_exception)}"
)
@pytest.mark.parametrize("sync", [True, False])
@pytest.mark.usefixtures("get_model_with_supported_export_formats_artifact")
def test_export_model_as_artifact(self, export_model_mock, sync):
test_model = models.Model(_TEST_ID)
if not sync:
test_model.wait()
test_model.export_model(
export_format_id=_TEST_EXPORT_FORMAT_ID_ARTIFACT,
artifact_destination=_TEST_OUTPUT_DIR,
)
expected_output_config = gca_model_service.ExportModelRequest.OutputConfig(
export_format_id=_TEST_EXPORT_FORMAT_ID_ARTIFACT,
artifact_destination=gca_io.GcsDestination(
output_uri_prefix=_TEST_OUTPUT_DIR
),
)
export_model_mock.assert_called_once_with(
name=f"{_TEST_PARENT}/models/{_TEST_ID}",
output_config=expected_output_config,
)
@pytest.mark.parametrize("sync", [True, False])
@pytest.mark.usefixtures("get_model_with_supported_export_formats_image")
def test_export_model_as_image(self, export_model_mock, sync):
test_model = models.Model(_TEST_ID)
test_model.export_model(
export_format_id=_TEST_EXPORT_FORMAT_ID_IMAGE,
image_destination=_TEST_CONTAINER_REGISTRY_DESTINATION,
)
if not sync:
test_model.wait()
expected_output_config = gca_model_service.ExportModelRequest.OutputConfig(
export_format_id=_TEST_EXPORT_FORMAT_ID_IMAGE,
image_destination=gca_io.ContainerRegistryDestination(
output_uri=_TEST_CONTAINER_REGISTRY_DESTINATION
),
)
export_model_mock.assert_called_once_with(
name=f"{_TEST_PARENT}/models/{_TEST_ID}",
output_config=expected_output_config,
)
@pytest.mark.parametrize("sync", [True, False])
@pytest.mark.usefixtures("get_model_with_both_supported_export_formats")
def test_export_model_as_both_formats(self, export_model_mock, sync):
"""Exports a 'tf-saved-model' as both an artifact and an image"""
test_model = models.Model(_TEST_ID)
test_model.export_model(
export_format_id=_TEST_EXPORT_FORMAT_ID_ARTIFACT,
image_destination=_TEST_CONTAINER_REGISTRY_DESTINATION,
artifact_destination=_TEST_OUTPUT_DIR,
)
if not sync:
test_model.wait()
expected_output_config = gca_model_service.ExportModelRequest.OutputConfig(
export_format_id=_TEST_EXPORT_FORMAT_ID_ARTIFACT,
image_destination=gca_io.ContainerRegistryDestination(
output_uri=_TEST_CONTAINER_REGISTRY_DESTINATION
),
artifact_destination=gca_io.GcsDestination(
output_uri_prefix=_TEST_OUTPUT_DIR
),
)
export_model_mock.assert_called_once_with(
name=f"{_TEST_PARENT}/models/{_TEST_ID}",
output_config=expected_output_config,
)
@pytest.mark.parametrize("sync", [True, False])
@pytest.mark.usefixtures("get_model_with_unsupported_export_formats")
def test_export_model_not_supported(self, export_model_mock, sync):
test_model = models.Model(_TEST_ID)
with pytest.raises(ValueError) as e:
test_model.export_model(
export_format_id=_TEST_EXPORT_FORMAT_ID_IMAGE,
image_destination=_TEST_CONTAINER_REGISTRY_DESTINATION,
)
if not sync:
test_model.wait()
assert e.match(
regexp=f"The model `{_TEST_PARENT}/models/{_TEST_ID}` is not exportable."
)
@pytest.mark.parametrize("sync", [True, False])
@pytest.mark.usefixtures("get_model_with_supported_export_formats_image")
def test_export_model_as_image_with_invalid_args(self, export_model_mock, sync):
# Passing an artifact destination on an image-only Model
with pytest.raises(ValueError) as dest_type_err:
test_model = models.Model(_TEST_ID)
test_model.export_model(
export_format_id=_TEST_EXPORT_FORMAT_ID_IMAGE,
artifact_destination=_TEST_OUTPUT_DIR,
sync=sync,
)
if not sync:
test_model.wait()
# Passing no destination type
with pytest.raises(ValueError) as no_dest_err:
test_model = models.Model(_TEST_ID)
test_model.export_model(
export_format_id=_TEST_EXPORT_FORMAT_ID_IMAGE, sync=sync,
)
if not sync:
test_model.wait()
# Passing an invalid export format ID
with pytest.raises(ValueError) as format_err:
test_model = models.Model(_TEST_ID)
test_model.export_model(
export_format_id=_TEST_EXPORT_FORMAT_ID_ARTIFACT,
image_destination=_TEST_CONTAINER_REGISTRY_DESTINATION,
sync=sync,
)
if not sync:
test_model.wait()
assert dest_type_err.match(
regexp=r"This model can not be exported as an artifact."
)
assert no_dest_err.match(regexp=r"Please provide an")
assert format_err.match(
regexp=f"'{_TEST_EXPORT_FORMAT_ID_ARTIFACT}' is not a supported export format"
)
@pytest.mark.parametrize("sync", [True, False])
@pytest.mark.usefixtures("get_model_with_supported_export_formats_artifact")
def test_export_model_as_artifact_with_invalid_args(self, export_model_mock, sync):
test_model = models.Model(_TEST_ID)
# Passing an image destination on an artifact-only Model
with pytest.raises(ValueError) as e:
test_model.export_model(
export_format_id=_TEST_EXPORT_FORMAT_ID_ARTIFACT,
image_destination=_TEST_CONTAINER_REGISTRY_DESTINATION,
sync=sync,
)
if not sync:
test_model.wait()
assert e.match(
regexp=r"This model can not be exported as a container image."
)
@pytest.mark.usefixtures(
"get_training_job_non_existent_mock", "get_model_with_training_job"
)
def test_get_and_return_subclass_not_found(self):
test_model = models.Model(_TEST_ID)
# Attempt to access Model's training job that no longer exists
with pytest.raises(api_exceptions.NotFound) as e:
test_model.training_job
assert e.match(
regexp=(
r"The training job used to create this model could not be found: "
fr"{_TEST_PIPELINE_RESOURCE_NAME}"
)
)
| sasha-gitg/python-aiplatform | tests/unit/aiplatform/test_models.py | Python | apache-2.0 | 57,744 |
# Copyright 2017 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron._i18n import _
from oslo_config import cfg
API_OPTS = [
cfg.StrOpt('ovsdb_connection',
default='tcp:127.0.0.1:6640',
help=_('The connection string for the OVSDB backend. '
'Will be used for all ovsdb commands and '
'by ovsdb-client when monitoring'
)),
cfg.StrOpt('ssl_key_file',
help=_('The SSL private key file to use when interacting with '
'OVSDB. Required when using an "ssl:" prefixed '
'ovsdb_connection'
)),
cfg.StrOpt('ssl_cert_file',
help=_('The SSL certificate file to use when interacting '
'with OVSDB. Required when using an "ssl:" prefixed '
'ovsdb_connection'
)),
cfg.StrOpt('ssl_ca_cert_file',
help=_('The Certificate Authority (CA) certificate to use '
'when interacting with OVSDB. Required when using an '
'"ssl:" prefixed ovsdb_connection'
)),
cfg.BoolOpt('ovsdb_debug',
default=False,
help=_('Enable OVSDB debug logs')),
]
def register_ovsdb_api_opts(cfg=cfg.CONF):
cfg.register_opts(API_OPTS, 'OVS')
| mahak/neutron | neutron/conf/agent/ovsdb_api.py | Python | apache-2.0 | 1,972 |
from server import db
class File(db.Model):
__tablename__ = 'files'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
identifier = db.Column(db.String, unique=True)
filename = db.Column(db.String)
total_size = db.Column(db.Integer)
file_type = db.Column(db.String)
readset = db.Column(db.String)
platform = db.Column(db.String)
run_type = db.Column(db.String)
capture_kit = db.Column(db.String)
library = db.Column(db.String)
reference = db.Column(db.String)
upload_status = db.Column(db.String)
upload_start_date = db.Column(db.DateTime)
upload_end_date = db.Column(db.DateTime)
is_archived = db.Column(db.Integer, default=0)
user_id = db.Column(db.String, db.ForeignKey('users.user_id'))
access_id = db.Column(db.Integer, db.ForeignKey("access.id"))
class Access(db.Model):
__tablename__ = 'access'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
auth_token = db.Column(db.String, unique=True)
creation_date = db.Column(db.DateTime)
expiration_date = db.Column(db.DateTime)
user_id = db.Column(db.String, db.ForeignKey('users.user_id'))
files = db.relationship(File, backref="access")
class Run(db.Model):
__tablename__ = 'runs'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
readset = db.Column(db.String)
library = db.Column(db.String)
run_type = db.Column(db.String)
bed = db.Column(db.String)
fastq1 = db.Column(db.String)
fastq2 = db.Column(db.String)
bam = db.Column(db.String)
status = db.Column(db.String)
user_id = db.Column(db.String, db.ForeignKey('users.user_id'))
sample_id = db.Column(db.Integer, db.ForeignKey('samples.id'))
sample_file_link = db.Table('sample_file_link',
db.Column('sample_id', db.Integer, db.ForeignKey('samples.id')),
db.Column('file_id', db.Integer, db.ForeignKey('files.id')))
class Sample(db.Model):
__tablename__ = 'samples'
__table_args__ = (db.UniqueConstraint('sample_name', 'user_id', name='sample_id'),)
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
sample_name = db.Column(db.String)
user_id = db.Column(db.String, db.ForeignKey('users.user_id'))
files = db.relationship(File, secondary=sample_file_link, backref="samples")
runs = db.relationship(Run, backref="sample")
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
user_id = db.Column(db.String, unique=True)
user_name = db.Column(db.String)
user_email = db.Column(db.String)
server_id = db.Column(db.String, db.ForeignKey('servers.server_id'))
access = db.relationship(Access, backref="user")
samples = db.relationship(Sample, backref="user")
files = db.relationship(File, backref="user")
runs = db.relationship(Run, backref="user")
class Server(db.Model):
__tablename__ = 'servers'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
server_token = db.Column(db.String, unique=True)
server_id = db.Column(db.String)
server_name = db.Column(db.String)
users = db.relationship(User, backref='server')
| sickkids-ccm/dcc-file-transfer | server/models.py | Python | mit | 3,264 |
#!/usr/bin/env python
# How to build source distribution
# - python setup.py sdist --format bztar
# - python setup.py sdist --format gztar
# - python setup.py sdist --format zip
# - python setup.py bdist_wheel
import os
import sys
from setuptools import setup, find_packages
MAJOR = 0
MINOR = 8
MICRO = 1
VERSION = "{0}.{1}.{2}".format(MAJOR, MINOR, MICRO)
def check_python_version():
"""Checks the python version, exits if < 3.4."""
python_major, python_minor = sys.version_info[:2]
if python_major != 3 or python_minor < 4:
sys.stderr.write("geneparse requires python 3 "
"(version 3.4 or higher)\n")
sys.exit(1)
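# write_version_file() regenerates geneparse/version.py with the VERSION string
# defined above, so the installed package can report its own version.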
def write_version_file(fn=None):
if fn is None:
fn = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
os.path.join("geneparse", "version.py"),
)
content = ("\n# THIS FILE WAS GENERATED AUTOMATICALLY\n"
'geneparse_version = "{version}"\n')
a = open(fn, "w")
try:
a.write(content.format(version=VERSION))
finally:
a.close()
def setup_package():
# Checking the python version prior to installation
check_python_version()
# Saving the version into a file
write_version_file()
setup(
name="geneparse",
version=VERSION,
description="A suite of parse for genotype formats.",
url="https://github.com/pgxcentre/geneparse",
license="MIT",
test_suite="geneparse.tests.test_suite",
zip_safe=False,
install_requires=["numpy >= 1.11.0", "pandas >= 0.19.0",
"pyplink >= 1.3.4", "setuptools >= 26.1.0",
"biopython >= 1.68", "pybgen >= 0.7.0"],
packages=find_packages(),
package_data={"geneparse.tests": ["data/*", "data/*/*"]},
classifiers=["Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: Free for non-commercial use",
"Operating System :: Unix",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Operating System :: Microsoft",
"Programming Language :: Python",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Topic :: Scientific/Engineering :: Bio-Informatics"],
keywords="bioinformatics genetics statistics",
)
if __name__ == "__main__":
setup_package()
| pgxcentre/geneparse | setup.py | Python | mit | 2,687 |
# vim:ts=4:sw=4:expandtab
'''Example of event firing.
'''
import time
import random
from diesel import (quickstart, quickstop, sleep,
fire, wait, log, loglevels,
set_log_level)
set_log_level(loglevels.DEBUG)
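# gunner() fires the 'bam' event with an increasing counter on every loop pass,
# while sieged() blocks on wait('bam'), reports throughput once 50,000 events
# have been received, and then stops the diesel app.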
def gunner():
x = 1
while True:
fire('bam', x)
x += 1
sleep()
def sieged():
t = time.time()
while True:
n = wait('bam')
if n % 10000 == 0:
log.info(str(n))
if n == 50000:
delt = time.time() - t
log.debug("50,000 messages in {0:.3f}s {1:.1f}/s)", delt, 50000 / delt)
quickstop()
log = log.name('fire-system')
quickstart(gunner, sieged)
| dieseldev/diesel | examples/fire.py | Python | bsd-3-clause | 716 |
import random
import sys
sys.path.append("..") #so other modules can be found in parent dir
from Player import *
from Constants import *
from Construction import CONSTR_STATS
from Ant import UNIT_STATS
from Move import Move
from GameState import *
from AIPlayerUtils import *
##
#AIPlayer
#Description: The responsibility of this class is to interact with the game by
#deciding a valid move based on a given game state. This class has methods that
#will be implemented by students in Dr. Nuxoll's AI course.
#
#Variables:
# playerId - The id of the player.
##
class AIPlayer(Player):
#__init__
#Description: Creates a new Player
#
#Parameters:
# inputPlayerId - The id to give the new player (int)
##
def __init__(self, inputPlayerId):
super(AIPlayer,self).__init__(inputPlayerId, "Genetic AI")
#List of genes to be tested
self.population = []
#List of scores correlated to genes in population
self.popFitness = []
#Index for which gene to test
self.geneIndex = 0
#Size of the population of genes
self.POP_SIZE = 10
#Number of games played to test each gene
self.NUM_GAMES = 10
#best score we've seen so far
self.bestScore = 0
#save the best state
self.bestState = None
self.cState = None
#Games played so far
self.gamesPlayed = 0
        #Mutation chance, as a percentage (checked against random.randrange(100))
self.MUTATION_CHANCE = 10
self.initPopulation()
##
#initPopulation
#Description: Initializes our population to random values
# And resets our popFitness scores to 0
#
#Parameters: None, this method just intializes instance variables
##
def initPopulation(self):
print "initializing population"
gene = []
self.popFitness = [0]*self.POP_SIZE
#Create a gene for each member of the population
for s in range(0, self.POP_SIZE):
#Create random unique values for the first 11 slots
            #These will represent the placement of the anthill, tunnel, and grass
for i in range(0, 11):
placement = None
while placement == None:
#Choose any x location
x = random.randint(0, 9)
#Choose any y location on our side of the board
y = random.randint(0, 3)
#Set the placement if this is a unique placement
if (x, y) not in gene:
placement = (x, y)
gene.append(placement)
#Create the first enemy food location
x = random.randint(0, 9)
y = random.randint(6, 9)
placement = (x,y)
gene.append(placement)
#Create the second food placement, distinct from the first
done = False
while not done:
x = random.randint(0, 9)
#Choose any y location on enemy side of the board
y = random.randint(6, 9)
#Set the move if != to previous
if (x, y) != placement:
placement = (x, y)
done = True
gene.append(placement)
self.population.append(gene)
gene = []
##
#getPlacement
#
#Description: called during setup phase for each Construction that
# must be placed by the player. These items are: 1 Anthill on
# the player's side; 1 tunnel on player's side; 9 grass on the
# player's side; and 2 food on the enemy's side.
#
#Parameters:
# construction - the Construction to be placed.
# currentState - the state of the game at this point in time.
#
#Return: The coordinates of where the construction is to be placed
##
def getPlacement(self, currentState):
numToPlace = 0
self.cState = currentState
#implemented by students to return their next move
if currentState.phase == SETUP_PHASE_1: #stuff on my side
#slice to index 11
firstSlice = self.population[self.geneIndex][:11]
#return the placement
return firstSlice
elif currentState.phase == SETUP_PHASE_2: #stuff on foe's side
secondSlice = self.population[self.geneIndex][11:]
x = secondSlice[0][0]
y = secondSlice[0][1]
x2 = secondSlice[1][0]
y2 = secondSlice[1][1]
#while it's not empty....
while (currentState.board[x][y].constr != None):
#fix it
x -= 1
while (currentState.board[x2][y2].constr != None):
x2 -= 1
secondSlice[0] = (x,y)
secondSlice[1] = (x2, y2)
return secondSlice
else:
return [(0, 0)]
##
#getMove
#Description: Gets the next move from the Player.
#
#Parameters:
# currentState - The state of the current game waiting for the player's move (GameState)
#
#Return: The Move to be made
##
def getMove(self, currentState):
moves = listAllLegalMoves(currentState)
selectedMove = moves[random.randint(0,len(moves) - 1)];
#don't do a build move if there are already 3+ ants
numAnts = len(currentState.inventories[currentState.whoseTurn].ants)
while (selectedMove.moveType == BUILD and numAnts >= 3):
selectedMove = moves[random.randint(0,len(moves) - 1)];
return selectedMove
##
#getAttack
#Description: Gets the attack to be made from the Player
#
    #Parameters:
# currentState - A clone of the current state (GameState)
# attackingAnt - The ant currently making the attack (Ant)
# enemyLocation - The Locations of the Enemies that can be attacked (Location[])
##
def getAttack(self, currentState, attackingAnt, enemyLocations):
#Attack a random enemy.
return enemyLocations[random.randint(0, len(enemyLocations) - 1)]
##
#registerWin
#Description: Tells the player if they won or not
#
#Parameters:
# hasWon - True if the player won the game. False if they lost (Boolean)
#
def registerWin(self, hasWon):
if hasWon:
print "we won!!"
self.popFitness[self.geneIndex] += 1
self.gamesPlayed += 1
#if we've reached our game limit for this gene, move to the next one
if (self.gamesPlayed == self.NUM_GAMES):
print str(self.popFitness[self.geneIndex]) + " is the score for this gene"
if (self.popFitness[self.geneIndex] > self.bestScore):
self.bestScore = self.popFitness[self.geneIndex]
self.bestState = self.cState
self.geneIndex += 1
self.gamesPlayed = 0
#if we have gone through the whole population, create a new generation
if (self.geneIndex == self.POP_SIZE):
print "jumping to next generation in RegsiterWin"
#output the best state to a file.
if (self.bestState == None):
self.bestState = self.cState
#redirect stdout to make sure we can print the ascii print state to
# a file, then set it back to original to log to the console
original = sys.stdout
sys.stdout = open('evidence3102017.txt', 'a')
asciiPrintState(self.bestState)
sys.stdout = original
self.geneIndex = 0
self.bestState = None
self.bestScore = 0
self.popFitness = [0]*self.POP_SIZE
self.createNextGeneration()
##
#createNextGeneration
#Description: creates the next generation of genes from the population of
# parents using the fittest 4 genes to make another population.
#
# modifies the population instance variable with the newest generation
#
def createNextGeneration(self):
scoredGenes = []
        #associate the scores with the right genes, sorted highest score first
        scoredGenes = zip(self.popFitness, self.population)
        scoredGenes = sorted(scoredGenes, key=lambda gene: gene[0], reverse=True)
#variable for next generation
nextGen = []
kids = []
#get the 2 top scoring genes, use them to create 10 more children
best = scoredGenes[0][1]
secondBest = scoredGenes[1][1]
#HARD CODED for population 10
for i in range(3):
#mate 0 with 1, 2, 3
kids = self.createChildren(best, scoredGenes[i+1][1])
nextGen.append(kids[0])
nextGen.append(kids[1])
for i in range(2):
#mate 1 with 2, 3
kids = self.createChildren(secondBest, scoredGenes[i+2][1])
nextGen.append(kids[0])
nextGen.append(kids[1])
#set the population for the next generation
self.population = nextGen
##
#createChildren
#Description: helper method to createNextGeneration, creates 2 child genes
# from 2 parents. swaps gene chunks at a random index with 10% to mutate.
#
#Parameters:
# parent1 - first parent gene
# parent2 - second parent gene
#
# Returns the list of 2 children
#
def createChildren(self,parent1, parent2):
print "creating new children, mixing genes"
children = []
childA = []
childB = []
conflicts = True
#Slicing and Conflict Management
while(conflicts):
conflicts = False
pos1 = random.randint(1,10)
pos2 = random.randint(pos1+1, 12)
childA = parent1[:pos1] + parent2[pos1:pos2] + parent1[pos2:]
childB = parent2[:pos1] + parent1[pos1:pos2] + parent2[pos2:]
if(len(childA[:11]) != len(set(childA[:11])) or len(childB[:11]) != len(set(childB[:11]))):
conflicts = True
if(childA[11] == childA[12] or childB[11] == childB[12]):
conflicts = True
#Mutation
if(random.randrange(100) < self.MUTATION_CHANCE):
conflicts = True
pos = random.randint(0,12)
while conflicts:
conflicts = False
if pos == 11 or pos == 12:
x = random.randint(0, 9)
                    #Choose any y location on the enemy side of the board
y = random.randint(6, 9)
childA[pos] = (x,y)
if(childA[11] == childA[12]):
conflicts = True
else:
x = random.randint(0, 9)
#Choose any y location on your side of the board
y = random.randint(0, 3)
childA[pos] = (x,y)
if len(childA[:11]) != len(set(childA[:11])):
conflicts = True
children.append(childA)
children.append(childB)
return children
| sundercode/AI-Homework | AI_done/linds17_sunderla17.py | Python | mit | 11,039 |
#!/usr/bin/python2
import re
import sys
part2 = len(sys.argv) > 1 and sys.argv[1] == '2'
f = open('./input16.txt', 'r')
aunt = []
truth = {
'children': 3,
'cats': 7,
'samoyeds': 2,
'pomeranians': 3,
'akitas': 0,
'vizslas': 0,
'goldfish': 5,
'trees': 3,
'cars': 2,
'perfumes': 1,
}
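# For each aunt, keep only the compounds consistent with the target values in
# `truth` and remember the aunt with the largest number of consistent clues.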
naunt = 1
matches = 0
aunt = 0
while True:
line = f.readline().rstrip()
if not line:
break
clues = {}
for x in re.split(r'\d+: ', line)[1].split(','):
key, val = x.split(":")
key = key.lstrip().rstrip()
val = int(val)
        # Part 2: cats/trees readings must exceed the target and
        # pomeranians/goldfish must be below it; every other compound (and all
        # of part 1) must match exactly.
        if ((part2 and key in ['cats', 'trees'] and val <= truth[key]) or
                (part2 and key in ['pomeranians', 'goldfish'] and val >= truth[key]) or
                (not (part2 and key in ['cats', 'trees', 'pomeranians', 'goldfish'])
                 and truth[key] != val)):
            continue
clues[key] = val
aunt, matches = (naunt, len(clues)) if len(clues) > matches else (aunt, matches)
naunt += 1
print aunt
| thiagorcdl/AdventOfCode | 2015/adv16.py | Python | gpl-3.0 | 949 |
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import os
from vdisk.externalcommand import ExternalCommand
chroot = ExternalCommand("chroot")
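# Enters the built image through the preset's entered_system() context manager
# (d[2] is taken as the root path for the chroot) and runs ns.shell inside it,
# returning the shell's exit code.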
def action(ns):
if not os.path.isfile(ns.image_path):
raise Exception("No such file: {0}".format(ns.image_path))
with ns.preset.entered_system() as d:
path = d[2]
exitcode, out, err = chroot(path, ns.shell, raise_on_exit=False)
return exitcode
| spotify/vdisk | vdisk/actions/enter.py | Python | apache-2.0 | 983 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""This routine controls which localizable files and entries are
reported and l10n-merged.
This needs to stay in sync with the copy in mobile/locales.
"""
def test(mod, path, entity = None):
import re
# ignore anything but mobile, which is our local repo checkout name
if mod not in ("netwerk", "dom", "toolkit", "security/manager",
"devtools/shared",
"services/sync", "mobile",
"mobile/android/base", "mobile/android"):
return "ignore"
if mod not in ("mobile", "mobile/android"):
# we only have exceptions for mobile*
return "error"
if mod == "mobile/android":
if not entity:
if (re.match(r"mobile-l10n.js", path) or
re.match(r"defines.inc", path)):
return "ignore"
if path == "defines.inc":
if entity == "MOZ_LANGPACK_CONTRIBUTORS":
return "ignore"
return "error"
# we're in mod == "mobile"
if re.match(r"searchplugins\/.+\.xml", path):
return "ignore"
if path == "chrome/region.properties":
# only region.properties exceptions remain
if (re.match(r"browser\.search\.order\.[1-9]", entity) or
re.match(r"browser\.search\.[a-zA-Z]+\.US", entity) or
re.match(r"browser\.contentHandlers\.types\.[0-5]", entity) or
re.match(r"gecko\.handlerService\.schemes\.", entity) or
re.match(r"gecko\.handlerService\.defaultHandlersVersion", entity) or
re.match(r"browser\.suggestedsites\.", entity)):
return "ignore"
return "error"
| Yukarumya/Yukarum-Redfoxes | mobile/android/locales/filter.py | Python | mpl-2.0 | 1,714 |
import numpy as np
import os
print("--> Loading parameters...")
global par
"""
Independent parameters
"""
par = {
# Setup parameters
'save_dir' : './save_dir/',
'data_dir' : './data_dir/',
'data_filenames' : ['data_even.mat', 'data_odd.mat'],
'debug_model' : False,
'load_previous_model' : False,
'ckpt_load_fn' : 'model.ckpt',
'ckpt_save_fn' : 'model.ckpt',
# Network configuration
'layer_dims' : [9*12-2,120,100,80,50,30,1],
'init_weight_sd' : 0.05,
'nonlinearity' : 'sigmoid',
'learning_rate' : 1e-3,
'num_iterations' : 100000,
'iters_between_eval' : 250,
'batch_size' : 100,
'hist_size' : 12,
'test_reps' : 50,
# Dropout
'keep_prob' : 1
}
def update_dependencies():
"""
Updates all parameter dependencies
"""
par['num_layers'] = len(par['layer_dims'])
update_dependencies()
print("--> Parameters successfully loaded.\n")
| nmasse/Feed-forward-network | parameters.py | Python | apache-2.0 | 1,088 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django_pgjson.fields
class Migration(migrations.Migration):
dependencies = [
('projects', '0010_project_modules_config'),
]
operations = [
migrations.CreateModel(
name='ProjectModulesConfig',
fields=[
('id', models.AutoField(serialize=False, auto_created=True, verbose_name='ID', primary_key=True)),
('config', django_pgjson.fields.JsonField(null=True, verbose_name='modules config', blank=True)),
('project', models.OneToOneField(to='projects.Project', verbose_name='project', related_name='modules_config')),
],
options={
'verbose_name_plural': 'project modules configs',
'verbose_name': 'project modules config',
'ordering': ['project'],
},
bases=(models.Model,),
),
migrations.RemoveField(
model_name='project',
name='modules_config',
),
]
| CoolCloud/taiga-back | taiga/projects/migrations/0011_auto_20141028_2057.py | Python | agpl-3.0 | 1,109 |
#!/usr/bin/env python
from database.sqconfig import TUB_Base, TB15_Base, TB16_Base
from sqlalchemy import Column, Integer, Sequence, String, DateTime
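# The same Tuberculose schema is declared three times below, once per database
# bind (TUB_Base, TB15_Base and TB16_Base), alongside a smaller Sintomatico table.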
class Tuberculose(TUB_Base):
__tablename__ = 'tuberculose'
__table_args__ = {'sqlite_autoincrement': True}
id = Column('id', Integer, Sequence('id_seq'), primary_key=True)
requisicao = Column(String(12), index=True)
data_cadastro = Column(DateTime)
paciente = Column(String(120))
idade = Column(String(2))
tipo_idade = Column(String(2))
sexo = Column(String(1))
ibge = Column(String(50))
municipio = Column(String(50))
regional = Column(String(2))
laboratorio = Column(String(80))
amostra = Column(String(1))
material = Column(String(60))
data_coleta = Column(DateTime)
data_recebimento = Column(DateTime)
data_sintomas = Column(DateTime)
finalidade = Column(String(50))
tratamento = Column(String(20))
tempo_tratamento = Column(String(2))
periodo_tratamento = Column(String(8))
tipo_populacao = Column(String(100))
droga_resistente = Column(String(8))
exame = Column(String(100))
metodo = Column(String(100))
data_resultado = Column(DateTime)
data_liberacao = Column(DateTime)
tempo_transporte = Column(Integer)
tempo_processamento = Column(Integer)
tempo_liberacao = Column(Integer)
resultado = Column(String(100))
def __init__(self, requisicao=None, data_cadastro=None, paciente=None,
idade=None, tipo_idade=None, sexo=None, ibge=None, municipio=None,
regional=None, laboratorio=None, amostra=None, material=None,
data_coleta=None, data_recebimento=None, data_sintomas=None,
finalidade=None, tratamento=None, tempo_tratamento=None,
periodo_tratamento=None, tipo_populacao=None, droga_resistente=None,
exame=None, metodo=None, data_resultado=None, data_liberacao=None,
tempo_transporte=None, tempo_processamento=None, tempo_liberacao=None,
resultado=None):
self.requisicao=requisicao
self.data_cadastro=data_cadastro
self.paciente=paciente
self.idade=idade
self.tipo_idade=tipo_idade
self.sexo=sexo
self.ibge=ibge
self.municipio=municipio
self.regional=regional
self.laboratorio=laboratorio
self.amostra=amostra
self.material=material
self.data_coleta=data_coleta
self.data_recebimento=data_recebimento
self.data_sintomas=data_sintomas
self.finalidade=finalidade
self.tratamento=tratamento
self.tempo_tratamento=tempo_tratamento
self.periodo_tratamento=periodo_tratamento
        self.tipo_populacao=tipo_populacao
self.droga_resistente=droga_resistente
self.exame=exame
self.metodo=metodo
self.data_resultado=data_resultado
self.data_liberacao=data_liberacao
self.tempo_transporte=tempo_transporte
self.tempo_processamento=tempo_processamento
self.tempo_liberacao=tempo_liberacao
self.resultado=resultado
def __repr__(self):
return " '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', \
'%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', \
'%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s'" % (
self.requisicao, self.data_cadastro, self.paciente, self.idade,
self.tipo_idade, self.sexo, self.ibge, self.municipio, self.regional,
self.laboratorio, self.amostra, self.material, self.data_coleta,
self.data_recebimento, self.data_sintomas, self.finalidade,
self.tratamento, self.tempo_tratamento, self.periodo_tratamento,
self.tipo_populacao, self.droga_resistente, self.exame,
self.metodo, self.data_resultado, self.data_liberacao, self.tempo_transporte,
self.tempo_processamento, self.tempo_liberacao, self.resultado)
@staticmethod
def get():
from database.sqconfig import TUB_Session
tub = TUB_Session.query(Tuberculose).all()
for tuberculose in tub:
return tuberculose
class Sintomatico(TUB_Base):
__tablename__ = 'sintomatico'
__table_args__ = {'sqlite_autoincrement': True}
id = Column('id', Integer, Sequence('id_seq'), primary_key=True)
requisicao = Column(String(12), index=True)
paciente = Column(String(120))
ibge = Column(String(50))
municipio = Column(String(50))
regional = Column(String(2))
amostra = Column(String(1))
material = Column(String(60))
data_coleta = Column(String(12))
data_recebimento = Column(String(12))
data_liberacao = Column(String(12))
exame = Column(String(50))
resultado = Column(String(100))
def __init__(self, requisicao=None, paciente=None, ibge=None, municipio=None,
regional=None, amostra=None, material=None, data_coleta=None,
data_recebimento=None, data_liberacao=None, exame=None, resultado=None):
self.requisicao=requisicao
self.paciente=paciente
self.ibge=ibge
self.municipio=municipio
self.regional=regional
self.amostra=amostra
self.material=material
self.data_coleta=data_coleta
self.data_recebimento=data_recebimento
self.data_liberacao=data_liberacao
self.exame=exame
self.resultado=resultado
def __repr__(self):
return " '%s', '%s', '%s', '%s', '%s', '%s', '%s', " \
"'%s', '%s', '%s', '%s', '%s'" % (
self.requisicao, self.paciente, self.ibge, self.municipio, self.regional,
self.amostra, self.material, self.data_coleta, self.data_recebimento,
self.data_liberacao, self.exame, self.resultado)
@staticmethod
def get():
from database.sqconfig import TUB_Session
sr = TUB_Session.query(Sintomatico).all()
for sintomatico in sr:
return sintomatico
class Tuberculose_2015(TB15_Base):
__tablename__ = 'tuberculose'
__table_args__ = {'sqlite_autoincrement': True}
id = Column('id', Integer, Sequence('id_seq'), primary_key=True)
requisicao = Column(String(12), index=True)
data_cadastro = Column(DateTime)
paciente = Column(String(120))
idade = Column(String(2))
tipo_idade = Column(String(2))
sexo = Column(String(1))
ibge = Column(String(50))
municipio = Column(String(50))
regional = Column(String(2))
laboratorio = Column(String(80))
amostra = Column(String(1))
material = Column(String(60))
data_coleta = Column(DateTime)
data_recebimento = Column(DateTime)
data_sintomas = Column(DateTime)
finalidade = Column(String(50))
tratamento = Column(String(20))
tempo_tratamento = Column(String(2))
periodo_tratamento = Column(String(8))
tipo_populacao = Column(String(100))
droga_resistente = Column(String(8))
exame = Column(String(100))
metodo = Column(String(100))
data_resultado = Column(DateTime)
data_liberacao = Column(DateTime)
tempo_transporte = Column(Integer)
tempo_processamento = Column(Integer)
tempo_liberacao = Column(Integer)
resultado = Column(String(100))
def __init__(self, requisicao=None, data_cadastro=None, paciente=None,
idade=None, tipo_idade=None, sexo=None, ibge=None, municipio=None,
regional=None, laboratorio=None, amostra=None, material=None,
data_coleta=None, data_recebimento=None, data_sintomas=None,
finalidade=None, tratamento=None, tempo_tratamento=None,
periodo_tratamento=None, tipo_populacao=None, droga_resistente=None,
exame=None, metodo=None, data_resultado=None, data_liberacao=None,
tempo_transporte=None, tempo_processamento=None, tempo_liberacao=None,
resultado=None):
self.requisicao=requisicao
self.data_cadastro=data_cadastro
self.paciente=paciente
self.idade=idade
self.tipo_idade=tipo_idade
self.sexo=sexo
self.ibge=ibge
self.municipio=municipio
self.regional=regional
self.laboratorio=laboratorio
self.amostra=amostra
self.material=material
self.data_coleta=data_coleta
self.data_recebimento=data_recebimento
self.data_sintomas=data_sintomas
self.finalidade=finalidade
self.tratamento=tratamento
self.tempo_tratamento=tempo_tratamento
self.periodo_tratamento=periodo_tratamento
        self.tipo_populacao=tipo_populacao
self.droga_resistente=droga_resistente
self.exame=exame
self.metodo=metodo
self.data_resultado=data_resultado
self.data_liberacao=data_liberacao
self.tempo_transporte=tempo_transporte
self.tempo_processamento=tempo_processamento
self.tempo_liberacao=tempo_liberacao
self.resultado=resultado
def __repr__(self):
return " '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', \
'%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', \
'%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s'" % (
self.requisicao, self.data_cadastro, self.paciente, self.idade,
self.tipo_idade, self.sexo, self.ibge, self.municipio, self.regional,
self.laboratorio, self.amostra, self.material, self.data_coleta,
self.data_recebimento, self.data_sintomas, self.finalidade,
self.tratamento, self.tempo_tratamento, self.periodo_tratamento,
self.tipo_populacao, self.droga_resistente, self.exame,
self.metodo, self.data_resultado, self.data_liberacao, self.tempo_transporte,
self.tempo_processamento, self.tempo_liberacao, self.resultado)
@staticmethod
def get():
from database.sqconfig import TB15_Session
        tub = TB15_Session.query(Tuberculose_2015).all()
for tuberculose in tub:
return tuberculose
class Tuberculose_2016(TB16_Base):
__tablename__ = 'tuberculose'
__table_args__ = {'sqlite_autoincrement': True}
id = Column('id', Integer, Sequence('id_seq'), primary_key=True)
requisicao = Column(String(12), index=True)
data_cadastro = Column(DateTime)
paciente = Column(String(120))
idade = Column(String(2))
tipo_idade = Column(String(2))
sexo = Column(String(1))
ibge = Column(String(50))
municipio = Column(String(50))
regional = Column(String(2))
laboratorio = Column(String(80))
amostra = Column(String(1))
material = Column(String(60))
data_coleta = Column(DateTime)
data_recebimento = Column(DateTime)
data_sintomas = Column(DateTime)
finalidade = Column(String(50))
tratamento = Column(String(20))
tempo_tratamento = Column(String(2))
periodo_tratamento = Column(String(8))
tipo_populacao = Column(String(100))
droga_resistente = Column(String(8))
exame = Column(String(100))
metodo = Column(String(100))
data_resultado = Column(DateTime)
data_liberacao = Column(DateTime)
tempo_transporte = Column(Integer)
tempo_processamento = Column(Integer)
tempo_liberacao = Column(Integer)
resultado = Column(String(100))
def __init__(self, requisicao=None, data_cadastro=None, paciente=None,
idade=None, tipo_idade=None, sexo=None, ibge=None, municipio=None,
regional=None, laboratorio=None, amostra=None, material=None,
data_coleta=None, data_recebimento=None, data_sintomas=None,
finalidade=None, tratamento=None, tempo_tratamento=None,
periodo_tratamento=None, tipo_populacao=None, droga_resistente=None,
exame=None, metodo=None, data_resultado=None, data_liberacao=None,
tempo_transporte=None, tempo_processamento=None, tempo_liberacao=None,
resultado=None):
self.requisicao=requisicao
self.data_cadastro=data_cadastro
self.paciente=paciente
self.idade=idade
self.tipo_idade=tipo_idade
self.sexo=sexo
self.ibge=ibge
self.municipio=municipio
self.regional=regional
self.laboratorio=laboratorio
self.amostra=amostra
self.material=material
self.data_coleta=data_coleta
self.data_recebimento=data_recebimento
self.data_sintomas=data_sintomas
self.finalidade=finalidade
self.tratamento=tratamento
self.tempo_tratamento=tempo_tratamento
self.periodo_tratamento=periodo_tratamento
        self.tipo_populacao=tipo_populacao
self.droga_resistente=droga_resistente
self.exame=exame
self.metodo=metodo
self.data_resultado=data_resultado
self.data_liberacao=data_liberacao
self.tempo_transporte=tempo_transporte
self.tempo_processamento=tempo_processamento
self.tempo_liberacao=tempo_liberacao
self.resultado=resultado
def __repr__(self):
return " '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', \
'%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', \
'%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s'" % (
self.requisicao, self.data_cadastro, self.paciente, self.idade,
self.tipo_idade, self.sexo, self.ibge, self.municipio, self.regional,
self.laboratorio, self.amostra, self.material, self.data_coleta,
self.data_recebimento, self.data_sintomas, self.finalidade,
self.tratamento, self.tempo_tratamento, self.periodo_tratamento,
self.tipo_populacao, self.droga_resistente, self.exame,
self.metodo, self.data_resultado, self.data_liberacao, self.tempo_transporte,
self.tempo_processamento, self.tempo_liberacao, self.resultado)
@staticmethod
def get():
from database.sqconfig import TB16_Session
        tub = TB16_Session.query(Tuberculose_2016).all()
for tuberculose in tub:
return tuberculose | ricardobergamo/dataGAL | models/tuberculose.py | Python | gpl-3.0 | 14,293 |
#!/usr/bin/env python
# coding=utf-8
"""659. Largest prime
https://projecteuler.net/problem=659
Consider the sequence $n^2+3$ with $n \ge 1$.
If we write down the first terms of this sequence we get:
$4, 7, 12, 19, 28, 39, 52, 67, 84, 103, 124, 147, 172, 199, 228, 259, 292,
327, 364,$... .
We see that the terms for $n=6$ and $n=7$ ($39$ and $52$) are both divisible
by $13$.
In fact $13$ is the largest prime dividing any two successive terms of this
sequence.
Let $P(k)$ be the largest prime that divides any two successive terms of the
sequence $n^2+k^2$.
Find the last 18 digits of $\displaystyle \sum_{k=1}^{10\,000\,000} P(k)$.
"""
| openqt/algorithms | projecteuler/pe659-largest-prime.py | Python | gpl-3.0 | 651 |
import cloudbridge.cloud.providers.azure.test.helpers as helpers
from cloudbridge.cloud.providers.azure.test.helpers import ProviderTestBase
class AzureKeyPairServiceTestCase(ProviderTestBase):
@helpers.skipIfNoService(['security.key_pairs'])
def test_azure_keypair_create(self):
key_pair_create = self.provider.security.key_pairs.create('NewKeyPair')
print("Create Key Pair - " + str(key_pair_create))
        self.assertIsNotNone(key_pair_create)
self.assertIsNotNone(key_pair_create.id)
self.assertIsNotNone(key_pair_create.material)
@helpers.skipIfNoService(['security.key_pairs'])
def test_azure_keypair_create_Exist(self):
with self.assertRaises(Exception) as context:
self.provider.security.key_pairs.create('KeyPair1')
self.assertTrue(
'Keypair already exists' in context.exception)
@helpers.skipIfNoService(['security.key_pairs'])
def test_azure_keypair_list(self):
key_pair_list = self.provider.security.key_pairs.list()
print("List Key Pairs - " + str(key_pair_list))
self.assertTrue(key_pair_list.total_results > 0)
@helpers.skipIfNoService(['security.key_pairs'])
def test_azure_keypair_get_exist_and_delete(self):
keypair_id = 'KeyPair1'
keypair_get = self.provider.security.key_pairs.get(keypair_id)
print("Get Key Pair - " + str(keypair_get))
self.assertIsNotNone(keypair_get)
keypair_get.delete()
@helpers.skipIfNoService(['security.key_pairs'])
def test_azure_keypair_get_notExist(self):
keypair_id = 'KeyPairNotExist'
keypair_get_not_exist = self.provider.security. \
key_pairs.get(keypair_id)
print("Get Key Pair Not Exist - " + str(keypair_get_not_exist))
self.assertIsNone(keypair_get_not_exist)
@helpers.skipIfNoService(['security.key_pairs'])
def test_azure_keypair_find(self):
keypair_name = 'KeyPair1'
keypair_find = self.provider.security.key_pairs.find(keypair_name)
print("Find Key Pair - " + str(keypair_find))
self.assertTrue(len(keypair_find) > 0)
| ms-azure-cloudbroker/cloudbridge | cloudbridge/cloud/providers/azure/test/test_azure_key_pair_service.py | Python | mit | 2,201 |
import paddle.v2 as paddle
import paddle.v2.framework.layers as layers
import paddle.v2.framework.core as core
import paddle.v2.framework.optimizer as optimizer
from paddle.v2.framework.framework import Program
from paddle.v2.framework.executor import Executor
from paddle.v2.framework.regularizer import L2DecayRegularizer
from paddle.v2.framework.initializer import UniformInitializer
import numpy as np
BATCH_SIZE = 128
init_program = Program()
program = Program()
image = layers.data(
name='x',
shape=[784],
data_type='float32',
program=program,
init_program=init_program)
param_attr = {
'name': None,
'initializer': UniformInitializer(
low=-1.0, high=1.0),
'regularization': L2DecayRegularizer(0.0005 * BATCH_SIZE)
}
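# Note: the shared param_attr above initialises every fc weight uniformly in
# [-1, 1] and applies L2 weight decay whose coefficient is scaled by the
# batch size (0.0005 * BATCH_SIZE), as defined by the regularizer above.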
hidden1 = layers.fc(input=image,
size=128,
act='relu',
program=program,
init_program=init_program,
param_attr=param_attr)
hidden2 = layers.fc(input=hidden1,
size=64,
act='relu',
program=program,
init_program=init_program,
param_attr=param_attr)
predict = layers.fc(input=hidden2,
size=10,
act='softmax',
program=program,
init_program=init_program,
param_attr=param_attr)
label = layers.data(
name='y',
shape=[1],
data_type='int64',
program=program,
init_program=init_program)
cost = layers.cross_entropy(
input=predict, label=label, program=program, init_program=init_program)
avg_cost = layers.mean(x=cost, program=program, init_program=init_program)
sgd_optimizer = optimizer.SGDOptimizer(learning_rate=0.001)
opts = sgd_optimizer.minimize(avg_cost)
train_reader = paddle.batch(
paddle.reader.shuffle(
paddle.dataset.mnist.train(), buf_size=8192),
batch_size=BATCH_SIZE)
place = core.CPUPlace()
exe = Executor(place)
exe.run(init_program, feed={}, fetch_list=[])
PASS_NUM = 100
for pass_id in range(PASS_NUM):
for data in train_reader():
x_data = np.array(map(lambda x: x[0], data)).astype("float32")
y_data = np.array(map(lambda x: x[1], data)).astype("int64")
y_data = np.expand_dims(y_data, axis=1)
tensor_x = core.LoDTensor()
tensor_x.set(x_data, place)
tensor_y = core.LoDTensor()
tensor_y.set(y_data, place)
outs = exe.run(program,
feed={'x': tensor_x,
'y': tensor_y},
fetch_list=[avg_cost])
out = np.array(outs[0])
if out[0] < 5.0:
exit(0) # if avg cost less than 5.0, we think our code is good.
exit(1)
| pengli09/Paddle | python/paddle/v2/framework/tests/test_recognize_digits_mlp.py | Python | apache-2.0 | 2,803 |
"""
Project Euler - Problem 6
Copyright (C) 2014 Thomas Vanesse
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
from __future__ import division
from pylab import *
from numpy import power # because for some reason pylab calls numpy.random.power by default
def p6():
'''
The sum of the squares of the first ten natural numbers is 385
The square of the sum of the first ten natural numbers is 3025
Hence the difference between the sum of the squares of the first ten
natural numbers and the square of the sum is 3025 - 385 = 2640.
Find the difference between the sum of the squares of the first one hundred
natural numbers and the square of the sum.
'''
my_range = arange(1,101,1)
return power(sum(my_range), 2) - sum(power(my_range, 2))
print(p6())
| tvanesse/projectEuler | p6.py | Python | gpl-2.0 | 1,413 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, no-member, too-many-locals, too-many-arguments, undefined-variable
"""SSD multibox operators"""
import tvm
from tvm.te import hybrid
from tvm.tir import exp, sqrt
from tvm import topi
from ..nms import non_max_suppression
@hybrid.script
def hybrid_multibox_prior(data, sizes, ratios, steps, offsets):
"""Hybrid routing for multibox_prior operator.
Parameters
----------
data : tvm.te.Tensor or numpy NDArray
        4-D tensor with shape [batch, channel, height, width]
sizes : tvm ConsExpr
Sizes for anchor boxes.
ratios : tvm ConsExpr
Ratios for anchor boxes.
steps : tvm ConsExpr
Priorbox step across y and x, -1 for auto calculation.
offsets : tvm ConsExpr
Priorbox center offsets, y and x respectively.
Returns
-------
output : tvm.te.Tensor or numpy NDArray
3-D tensor with shape [1, h_in * w_in * (num_sizes + num_ratios - 1), 4]
"""
in_height = data.shape[2]
in_width = data.shape[3]
num_sizes = len(sizes)
num_ratios = len(ratios)
num_boxes = in_height * in_width * (num_sizes + num_ratios - 1)
output = output_tensor((1, num_boxes, 4), "float32")
steps_h = steps[0] * 1.0 if steps[0] > 0 else 1.0 / in_height
steps_w = steps[1] * 1.0 if steps[1] > 0 else 1.0 / in_width
offset_h = offsets[0]
offset_w = offsets[1]
# Need to define var out of const_range + if
w = 0.0
h = 0.0
for i in parallel(in_height):
center_h = (i + offset_h) * steps_h
for j in range(in_width):
center_w = (j + offset_w) * steps_w
for k in const_range(num_sizes + num_ratios - 1):
if k < num_sizes:
w = float32(sizes[k] * in_height) / in_width / 2.0
h = sizes[k] / 2.0
else:
w = (
float32(sizes[0] * in_height)
/ in_width
* sqrt(ratios[k - num_sizes + 1] * 1.0)
/ 2.0
)
h = sizes[0] / sqrt(ratios[k - num_sizes + 1] * 1.0) / 2.0
count = (
i * in_width * (num_sizes + num_ratios - 1)
+ j * (num_sizes + num_ratios - 1)
+ k
)
output[0, count, 0] = center_w - w
output[0, count, 1] = center_h - h
output[0, count, 2] = center_w + w
output[0, count, 3] = center_h + h
return output
def multibox_prior(data, sizes=(1,), ratios=(1,), steps=(-1, -1), offsets=(0.5, 0.5), clip=False):
"""Generate prior(anchor) boxes from data, sizes and ratios.
Parameters
----------
data : tvm.te.Tensor
        4-D with shape [batch, c_in, h_in, w_in]
sizes : tuple of float
Tuple of sizes for anchor boxes.
ratios : tuple of float
Tuple of ratios for anchor boxes.
steps : Tuple of float
Priorbox step across y and x, -1 for auto calculation.
offsets : tuple of int
Priorbox center offsets, y and x respectively.
clip : boolean
Whether to clip out-of-boundary boxes.
Returns
-------
out : tvm.te.Tensor
3-D tensor with shape [1, h_in * w_in * (num_sizes + num_ratios - 1), 4]
"""
out = hybrid_multibox_prior(
data,
tvm.runtime.convert(sizes),
tvm.runtime.convert(ratios),
tvm.runtime.convert(steps),
tvm.runtime.convert(offsets),
)
if clip:
out = topi.clip(out, 0, 1)
return out
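# Illustrative usage sketch (hypothetical shapes and sizes; kept as a comment
# so nothing runs at import time):
#
#   from tvm import te, topi
#   data = te.placeholder((1, 3, 512, 512), name="data")
#   anchors = topi.vision.ssd.multibox_prior(
#       data, sizes=(0.2, 0.95), ratios=(1.0, 2.0, 0.5), clip=True)
#   # anchors has shape (1, 512 * 512 * (2 + 3 - 1), 4)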
@hybrid.script
def _hybridy_transform_loc(box, pred_loc, variance, clip):
"""Transform prior anchor box to output box through location predictions."""
al = box[0]
at = box[1]
ar = box[2]
ab = box[3]
px = pred_loc[0]
py = pred_loc[1]
pw = pred_loc[2]
ph = pred_loc[3]
vx = variance[0]
vy = variance[1]
vw = variance[2]
vh = variance[3]
output = output_tensor((4,), pred_loc.dtype)
aw = ar - al
ah = ab - at
ax = (al + ar) / 2.0
ay = (at + ab) / 2.0
ox = px * vx * aw + ax
oy = py * vy * ah + ay
ow = exp(pw * vw) * aw / 2.0
oh = exp(ph * vh) * ah / 2.0
output[0] = max(0.0, min(1.0, ox - ow)) if clip else ox - ow
output[1] = max(0.0, min(1.0, oy - oh)) if clip else oy - oh
output[2] = max(0.0, min(1.0, ox + ow)) if clip else ox + ow
output[3] = max(0.0, min(1.0, oy + oh)) if clip else oy + oh
return output
@hybrid.script
def hybrid_multibox_transform_loc(cls_prob, loc_pred, anchor, clip, threshold, variances):
"""Hybrid routing for transform location in multibox_detection operator.
Parameters
----------
cls_prob : tvm.te.Tensor or numpy NDArray
3-D tensor of class probabilities.
loc_pred : tvm.te.Tensor or numpy NDArray
2-D tensor of location regression predictions.
anchor : tvm.te.Tensor or numpy NDArray
3-D tensor of prior anchor boxes.
clip : tvm.tir.const
Whether to clip out-of-boundary boxes.
threshold : tvm.tir.const
Threshold to be a positive prediction.
variances : tvm.nd.NDArray
Variances to be decoded from box regression output.
Returns
-------
out_loc : tvm.te.Tensor or numpy NDArray
3-D tensor of transformed location.
valid_count : tvm.te.Tensor or numpy NDArray
1_d tensor of valid counts for boxes.
"""
batch_size = cls_prob.shape[0]
num_classes = cls_prob.shape[1]
num_anchors = cls_prob.shape[2]
box_coord = allocate((4,), loc_pred.dtype)
pred_coord = allocate((4,), loc_pred.dtype)
out_loc = output_tensor((batch_size, num_anchors, 6), loc_pred.dtype)
valid_count = output_tensor((batch_size,), "int32")
for i in parallel(batch_size):
valid_count[i] = 0
for j in range(num_anchors):
# Find the predicted class id and probability
score = -1.0
cls_id = 0
for k in range(num_classes):
if k > 0:
temp = cls_prob[i, k, j]
cls_id = k if temp > score else cls_id
score = max(temp, score)
if cls_id > 0 and score < threshold:
cls_id = 0
# [id, prob, xmin, ymin, xmax, ymax]
# Remove background, restore original id
if cls_id > 0:
out_loc[i, valid_count[i], 0] = cls_id - 1.0
out_loc[i, valid_count[i], 1] = score
for l in range(4):
box_coord[l] = anchor[0, j, l]
pred_coord[l] = loc_pred[i, j * 4 + l]
out_coord = _hybridy_transform_loc(box_coord, pred_coord, variances, clip)
out_loc[i, valid_count[i], 2] = out_coord[0]
out_loc[i, valid_count[i], 3] = out_coord[1]
out_loc[i, valid_count[i], 4] = out_coord[2]
out_loc[i, valid_count[i], 5] = out_coord[3]
valid_count[i] += 1
return out_loc, valid_count
def multibox_transform_loc(
cls_prob, loc_pred, anchor, clip=True, threshold=0.01, variances=(0.1, 0.1, 0.2, 0.2)
):
"""Location transformation for multibox detection
Parameters
----------
cls_prob : tvm.te.Tensor
Class probabilities.
loc_pred : tvm.te.Tensor
Location regression predictions.
anchor : tvm.te.Tensor
Prior anchor boxes.
clip : boolean
Whether to clip out-of-boundary boxes.
threshold : float
Threshold to be a positive prediction.
variances : tuple of float
Variances to be decoded from box regression output.
Returns
-------
    ret : tuple of tvm.te.Tensor
        (out_loc, valid_count): transformed box locations and the number of
        valid boxes for each batch element.
"""
return hybrid_multibox_transform_loc(
cls_prob,
loc_pred,
anchor,
tvm.tir.const(clip, "bool"),
tvm.tir.const(threshold, "float32"),
tvm.runtime.convert(variances),
)
def multibox_detection(
cls_prob,
loc_pred,
anchor,
clip=True,
threshold=0.01,
nms_threshold=0.5,
force_suppress=False,
variances=(0.1, 0.1, 0.2, 0.2),
nms_topk=-1,
):
"""Convert multibox detection predictions.
Parameters
----------
cls_prob : tvm.te.Tensor
Class probabilities.
loc_pred : tvm.te.Tensor
Location regression predictions.
anchor : tvm.te.Tensor
Prior anchor boxes.
clip : boolean
Whether to clip out-of-boundary boxes.
nms_threshold : float
Non-maximum suppression threshold.
force_suppress : boolean
Whether to suppress all detections regardless of class_id.
threshold : float
Threshold to be a positive prediction.
variances : tuple of float
Variances to be decoded from box regression output.
nms_topk : int
Keep maximum top k detections before nms, -1 for no limit.
Returns
-------
out : tvm.te.Tensor
3-D tensor with shape (batch_size, num_anchors, 6)
"""
inter_out = multibox_transform_loc(cls_prob, loc_pred, anchor, clip, threshold, variances)
out = non_max_suppression(
inter_out[0],
inter_out[1],
inter_out[1],
max_output_size=-1,
iou_threshold=nms_threshold,
force_suppress=force_suppress,
top_k=nms_topk,
return_indices=False,
)
return out
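# Illustrative end-to-end sketch (hypothetical shapes; kept as a comment):
#
#   num_anchors, num_classes = 2553, 21
#   cls_prob = te.placeholder((1, num_classes, num_anchors), name="cls_prob")
#   loc_pred = te.placeholder((1, num_anchors * 4), name="loc_pred")
#   anchors = te.placeholder((1, num_anchors, 4), name="anchors")
#   out = multibox_detection(cls_prob, loc_pred, anchors, nms_threshold=0.5)
#   # out has shape (1, num_anchors, 6); each row is
#   # [class_id, score, xmin, ymin, xmax, ymax]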
| dmlc/tvm | python/tvm/topi/vision/ssd/multibox.py | Python | apache-2.0 | 10,269 |
# encoding: utf-8
"""
routerid.py
Created by Thomas Mangin on 2012-07-17.
Copyright (c) 2009-2015 Exa Networks. All rights reserved.
"""
from exabgp.protocol.ip import IPv4
# ===================================================================== RouterID
#
class RouterID (IPv4):
@classmethod
def unpack (cls,data):
return cls('.'.join(str(ord(_)) for _ in data),data)
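# Illustrative example (this code base targets Python 2, so `data` is a byte
# string): RouterID.unpack('\xc0\x00\x02\x01') yields RouterID('192.0.2.1').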
| lochiiconnectivity/exabgp | lib/exabgp/bgp/message/open/routerid.py | Python | bsd-3-clause | 376 |
import pandas
import numpy
import itertools
import sklearn
import sklearn.tree
import sklearn.svm
from sklearn.ensemble import AdaBoostClassifier
from sklearn.cluster import KMeans
import sklearn.multiclass
import sklearn.model_selection
import sklearn.metrics
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
start = pandas.Timestamp.now()
data = pandas.read_csv('C:/Users/user/Documents/train.csv', nrows=5000, error_bad_lines=False, warn_bad_lines=False)
data.date_time = pandas.to_datetime(data.date_time)
data = data.assign(date_year = [date.year for date in data.date_time])
data = data.assign(date_month = [date.month for date in data.date_time])
data = data.assign(date_day = [date.day for date in data.date_time])
data.visitor_hist_starrating[(data.visitor_hist_starrating > 0)==False] = 0
data.visitor_hist_adr_usd[(data.visitor_hist_adr_usd > 0)==False] = 0
data.prop_location_score2[(data.prop_location_score2 > 0)==False] = 0.0
data.prop_location_score1 += data.prop_location_score2
data.srch_query_affinity_score = numpy.exp(data.srch_query_affinity_score)
data.srch_query_affinity_score[(data.srch_query_affinity_score > 0)==False] = 0.0
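# The block below imputes missing orig_destination_distance values: rows with
# a known distance are clustered on a few id columns with KMeans, the mean
# distance per cluster is computed, and rows lacking a distance receive the
# mean of the cluster they are assigned to.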
X = data[data.orig_destination_distance > 0]
Y = data.drop(list(X.axes[0]))
Cluster = KMeans(n_clusters=10).fit(X.xs(['site_id', 'visitor_location_country_id', 'prop_country_id', 'srch_destination_id'], axis=1), X.orig_destination_distance.astype(int))
Cluster_predict = Cluster.labels_  # labels from the KMeans fitted above; a second, independently fitted model would assign unrelated cluster ids
center = numpy.ones(10)
for i in range(10):
center[i] = numpy.mean((Cluster_predict==i) * X.orig_destination_distance)
Cluster_predict = Cluster.predict(Y.xs(['site_id', 'visitor_location_country_id', 'prop_country_id', 'srch_destination_id'], axis=1))
data.orig_destination_distance[(data.orig_destination_distance > 0)==False] = numpy.sum((numpy.full([10,len(Cluster_predict)], Cluster_predict).T == numpy.full([len(Cluster_predict), 10], numpy.arange(10))) * center, axis=1)
del X, Y, Cluster_predict, center, data['srch_id'], data['date_time'], data['prop_location_score2']
data = data.assign(rate_percent_diff = numpy.zeros(data.shape[0]))
data.rate_percent_diff += data.comp1_rate.fillna(0.0) * data.comp1_rate_percent_diff.fillna(0.0)
data.rate_percent_diff += data.comp2_rate.fillna(0.0) * data.comp2_rate_percent_diff.fillna(0.0)
data.rate_percent_diff += data.comp3_rate.fillna(0.0) * data.comp3_rate_percent_diff.fillna(0.0)
data.rate_percent_diff += data.comp4_rate.fillna(0.0) * data.comp4_rate_percent_diff.fillna(0.0)
data.rate_percent_diff += data.comp5_rate.fillna(0.0) * data.comp5_rate_percent_diff.fillna(0.0)
data.rate_percent_diff += data.comp6_rate.fillna(0.0) * data.comp6_rate_percent_diff.fillna(0.0)
data.rate_percent_diff += data.comp7_rate.fillna(0.0) * data.comp7_rate_percent_diff.fillna(0.0)
data.rate_percent_diff += data.comp8_rate.fillna(0.0) * data.comp8_rate_percent_diff.fillna(0.0)
data.comp1_inv = (data.comp1_inv > 0) * 1
data.comp2_inv = (data.comp2_inv > 0) * 1
data.comp3_inv = (data.comp3_inv > 0) * 1
data.comp4_inv = (data.comp4_inv > 0) * 1
data.comp5_inv = (data.comp5_inv > 0) * 1
data.comp6_inv = (data.comp6_inv > 0) * 1
data.comp7_inv = (data.comp7_inv > 0) * 1
data.comp8_inv = (data.comp8_inv > 0) * 1
del data['comp1_rate'], data['comp1_rate_percent_diff']
del data['comp2_rate'], data['comp2_rate_percent_diff']
del data['comp3_rate'], data['comp3_rate_percent_diff']
del data['comp4_rate'], data['comp4_rate_percent_diff']
del data['comp5_rate'], data['comp5_rate_percent_diff']
del data['comp6_rate'], data['comp6_rate_percent_diff']
del data['comp7_rate'], data['comp7_rate_percent_diff']
del data['comp8_rate'], data['comp8_rate_percent_diff']
data.gross_bookings_usd = data.gross_bookings_usd.fillna(0.0)
data = data.dropna()
data = pandas.get_dummies(data, prefix=['site_id', 'visitor_location_country_id', 'prop_country_id'], columns=['site_id', 'visitor_location_country_id', 'prop_country_id'])
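# random_sampling() below builds a balanced training set: the minority "click"
# rows are oversampled with replacement up to the number of "no click" rows,
# while the test set keeps the original class ratio.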
def random_sampling(data, percent_for_train=0.6):
click = data[data.click_bool==1]
click_train = click.sample(frac = percent_for_train)
click_test = click.drop(list(click_train.axes[0]))
not_click = data[data.click_bool==0]
not_click_train = not_click.sample(frac = percent_for_train)
not_click_test = not_click.drop(list(not_click_train.axes[0]))
train = click_train.sample(n=not_click_train.shape[0], replace=True).append(not_click_train, ignore_index=True)
test = click_test.append(not_click_test, ignore_index=True)
return [train, test]
sampling = random_sampling(data, 0.6)
x_train = sampling[0].drop(['click_bool', 'gross_bookings_usd', 'booking_bool'], axis=1)
y_train = sampling[0].booking_bool
x_test = sampling[1].drop(['click_bool', 'gross_bookings_usd', 'booking_bool'], axis=1)
y_test = sampling[1].booking_bool
keys = list(x_train.keys())
Ada = sklearn.model_selection.GridSearchCV(AdaBoostClassifier(), {'n_estimators':[10,20,50], 'learning_rate':[0.001,0.01,0.5,1]}, cv=3).fit(x_train, y_train).best_estimator_
Ada_predict = Ada.predict(x_test)
Ada_predict_proba = Ada.predict_proba(x_test)
Ada_ROC = sklearn.metrics.roc_curve(y_test, Ada_predict_proba[:,1])
Ada_confusion = (sklearn.metrics.confusion_matrix(y_test, Ada_predict)*len(y_test)**(-1)).round(4)
Ada_report = sklearn.metrics.classification_report(y_test, Ada_predict, target_names = ['not Click', 'Click'], digits=4)
figure, (ax1, ax2) = plt.subplots(nrows=1, ncols=2, figsize=(15,3))
#AdaBoostClassifier Confusion Matrix
plt.sca(ax1)
plt.imshow(Ada_confusion, interpolation='nearest', cmap=plt.cm.YlGn)
plt.title('Confusion Matrix of AdaBoostClassifier')
plt.colorbar()
plt.xticks(range(2))
plt.yticks(range(2))
for i, j in itertools.product(range(2),range(2)):
plt.text(j, i, Ada_confusion[i, j], horizontalalignment="center", color="black", size='xx-large')
plt.ylabel('True label', size='large')
plt.xlabel('Predicted label', size='large')
plt.subplots_adjust(wspace=0.5, hspace=0.5)
#All ROC
plt.sca(ax2)
plt.plot(Ada_ROC[0], Ada_ROC[1], color='y',label='Ada')
plt.legend(loc=4)
plt.plot([0, 1], [0, 1], color='k', linestyle='--', linewidth=3)
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate', size='large')
plt.ylabel('True Positive Rate', size='large')
plt.title('ROC Curve')
plt.subplots_adjust(wspace=1, hspace=0.5)
figure.savefig('temp.png')
print('AdaBoostClassifier parameters with cross-validation')
print(Ada.get_params())
print('Feature importance or coef')
print(pandas.DataFrame(numpy.array(Ada.feature_importances_.round(4)).T, columns=['Ada'], index=keys))
print('AdaBoostClassifier report')
print(Ada_report)
print('Time: ' + str(pandas.Timestamp.now()-start)) | BigDataAnalytics2017/ProjectOTA | ClassificationAdaBoostClassifier.py | Python | gpl-3.0 | 6,814 |
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 2 16:24:01 2019
@author: Rignak
"""
from urllib import request
from datetime import datetime
from lib.TWRV import ThreadWithReturnValue
from sys import stdout
from PIL import Image
import json
import os
from os.path import join, split, exists
import numpy as np
import re
from lib import Sample
resF = '_res'
def Flatten(l):
if all([type(x) != list for x in l]):
return l
else:
newL = []
for e in l:
if type(e) != list:
newL.append(e)
else:
newL = newL + Flatten(e)
return newL
def SingleJPGorPNG(url):
try:
req = request.Request(url)
req.add_header('Referer', 'https://www.pixiv.net/')
request.urlopen(req)
except Exception:
url = url.replace('.jpg', '.png')
return url
def FinalJPGorPNG():
with open('3-final.html', 'r') as file:
lines = file.readlines()
urls = lines[0].split('<br/>')
urls = [link.split('"')[1] for link in urls[:-1]]
if not urls and lines:
urls = lines
pages = []
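    # The loop below rewrites pixiv "master" thumbnail URLs into original-size
    # URLs: '.../c/600x600/img-master/..._master1200.jpg' style links become
    # '.../img-original/....jpg'; SingleJPGorPNG() later probes whether the
    # original file is actually a .jpg or a .png.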
for url in urls:
if url is None:
continue
elif '600x600' in url:
url = url.replace('c/600x600/img-master', 'img-original').replace('_master1200', '')
elif 'img-master' in url:
url = url.replace('img-master', 'img-original').replace('_master1200.jpg', '.jpg')
if url not in pages:
pages.append(url)
nb=50
begin = datetime.now()
threads = [None] * nb
newPages = []
l = len(pages)
for i, url in enumerate(pages):
threads[i%nb] = ThreadWithReturnValue(target=SingleJPGorPNG, args=(pages[i], ))
threads[i%nb].start()
if (i%nb == 0 and i != 0) or i==l-1:
res = [thread.join() for thread in threads if not (thread is None)]
threads = [None] * nb
for j, page in enumerate(res):
newPages.append(page)
ending = ((datetime.now() - begin) / (i+1) * l + begin).strftime('%H:%M')
stdout.write(f"\r{i+1}/{l} | {ending}")
pages = []
with open('3-final.html', 'w') as file:
for url in newPages:
if url not in pages:
pages.append(url)
file.write(f'<A HREF="{url}">{url}<br/>')
def SplitDirectLink():
nb = int(input('Number of url in each file ? '))
links = open('2-directlink.html', 'r').readline().split('https')
links = ['https' + link for link in links]
nbFile = len(links)//nb+1
for i in range(nbFile):
with open('2-directlink'+str(i)+'.html', 'w') as file:
for j in range(nb):
if i*nb+j+1 > len(links):
break
file.write(links[i*nb+j])
print(nbFile, 'files created')
def CorrectUrl(imgs):
links = open('2-directlink.html', 'r').readline().split('https')
dicUrl = {url.split('/')[-1]:url for url in links}
for img in imgs:
if img._url in dicUrl:
img._url = 'https'+dicUrl[img._url]
else:
img._url = 'https'+img._url
return imgs
def IsManga(imgs):
from keras.preprocessing import image as image_utils
from keras.models import load_model
from keras import backend as K
K.image_dim_ordering()
def FormatImage(img):
size = (150, 150)
tempImg = Image.open(img._data)
tempImg.thumbnail(size)
x, y = tempImg.size
newIm = Image.new('RGB', size, (0,0,0))
newIm.paste(tempImg, (int((size[0] - x)/2), int((size[1] - y)/2)))
newIm = image_utils.img_to_array(newIm)
return newIm
model = load_model('flatCat.h5')
finalImgs = []
begin = datetime.now()
l = len(imgs)
c = 0
batch = []
ims = []
ys = []
toRemove = []
for i,img in enumerate(imgs):
try:
batch.append(FormatImage(img))
ims.append(img)
ys.append(i)
except Exception as e :
print(e)
if c == 128 or i == len(imgs)-1:
batch = np.array(batch)
preds = model.predict(batch)
for j, pred in enumerate(preds):
im = ims[j]
if np.argmax(pred)==1:
finalImgs.append(im)
elif type(im._data) == str:
toRemove.append(im._data)
c = 0
batch = []
ims = []
ys = []
ending = (datetime.now() - begin) / (i+1) * l + begin
stdout.write(f"\r({len(finalImgs)}){i+1} on {l} | {ending.strftime('%H:%M')}")
c+=1
print('Removing')
[os.remove(path) for path in toRemove]
print('\nOut of', str(l)+',', len(finalImgs),'were illustrations')
return finalImgs
def Url2Data(url):
path = join(resF, split(url)[-1])
if exists(path):
im = Sample.Sample(url, dl=False)
im._data = path
else:
im = Sample.Sample(url)
im.Save()
return im
def ShowImgs(imgs):
files = []
imgs = [img for img in imgs if img and img._data]
save = {}
for j, img in enumerate(imgs):
filename = join(resF, f"{j}.jpg")
if type(img._data) is str and exists(img._data):
os.rename(img._data, filename)
else:
try:
tempImg = Image.open(img._data)
tempImg.save(filename, 'JPEG')
tempImg.close()
except Exception as e:
print('Error', e)
files.append(filename)
save[filename] = img._url
for file in os.listdir(resF):
if '_master' in file:
os.remove(join(resF, file))
with open('save.json', 'w') as file:
json.dump(save, file, sort_keys=True, indent=4)
ShowFromLocal()
print('-----------------------')
def ShowFromLocal():
with open('save.json', 'r') as file:
save = json.load(file)
imgs = os.listdir(resF)
convert = lambda text: int(text) if text.isdigit() else text
alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
imgs = sorted(imgs, key = alphanum_key)
ids = []
sameIds = {}
for filename in imgs:
filename = join(resF, filename)
if filename not in save:
continue
url = save[filename]
if 'pximg' in url:
id_, page = url.split('/')[-1].split('_')[:2]
try:
page = int(page[1:])
except:
page = 0
if id_ in ids:
sameIds[id_].append((page, filename))
else:
sameIds[id_] = [(page, filename)]
ids.append(id_)
else:
sameIds[url] = [('', filename)]
ids.append(url)
for key, value in sameIds.items():
sameIds[key] = [e2 for e1, e2 in sorted(value)]
input('If you want to check with the explorer...')
with open('3-final.html', 'w') as file:
for id_ in ids:
for filename in sameIds[id_]:
url = save[filename]
if exists(filename):
file.write(f'<A HREF="{url}">{url}<br/>')
| Rignak/Scripts-Python | Danbooru/NotDan/lib/Show.py | Python | gpl-3.0 | 7,228 |
# Stub for pre django 1.7 apps.
# -*- coding: utf-8 -*-
from django.db import models
from django.conf import settings
from django.contrib.sessions.models import Session
from .utils import (get_cas_client, get_service_url)
class ProxyError(ValueError):
pass
class ProxyGrantingTicket(models.Model):
class Meta:
unique_together = ('session', 'user')
session = models.ForeignKey(
Session,
related_name="+",
blank=True,
null=True
)
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name="+",
null=True,
blank=True
)
pgtiou = models.CharField(max_length=255, null=True, blank=True)
pgt = models.CharField(max_length=255, null=True, blank=True)
date = models.DateTimeField(auto_now=True)
@classmethod
def retrieve_pt(cls, request):
"""`request` should be the current HttpRequest object
        `service` a string representing the service for which we want to
        retrieve a ticket.
        The function returns a Proxy Ticket or raises `ProxyError`
"""
session = Session.objects.get(session_key=request.session.session_key)
try:
pgt = cls.objects.get(user=request.user, session=session).pgt
except cls.DoesNotExist:
raise ProxyError(
"INVALID_TICKET",
"No proxy ticket found for this HttpRequest object"
)
else:
service_url = get_service_url(request)
client = get_cas_client(service_url=service_url)
try:
return client.get_proxy_ticket(pgt)
except Exception as e:
raise ProxyError(unicode(e))
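# Illustrative usage sketch (hypothetical view; not part of this module's API):
#
#   def my_view(request):
#       try:
#           pt = ProxyGrantingTicket.retrieve_pt(request)
#       except ProxyError:
#           pt = None
#       # `pt` is a proxy ticket usable against a CAS-protected backend.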
class SessionTicket(models.Model):
session = models.OneToOneField(Session, related_name="+")
ticket = models.CharField(max_length=255)
| IRI-Research/django-cas-ng | django_cas_ng/models.py | Python | mit | 1,863 |
# obsolete.py - obsolete markers handling
#
# Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
# Logilab SA <contact@logilab.fr>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""Obsolete marker handling
An obsolete marker maps an old changeset to a list of new
changesets. If the list of new changesets is empty, the old changeset
is said to be "killed". Otherwise, the old changeset is being
"replaced" by the new changesets.
Obsolete markers can be used to record and distribute changeset graph
transformations performed by history rewrite operations, and help
build new tools to reconcile conflicting rewrite actions. To
facilitate conflict resolution, markers include various annotations
besides old and news changeset identifiers, such as creation date or
author name.
The old obsoleted changeset is called a "precursor" and possible
replacements are called "successors". Markers that used changeset X as
a precursor are called "successor markers of X" because they hold
information about the successors of X. Markers that use changeset Y as
a successors are call "precursor markers of Y" because they hold
information about the precursors of Y.
Examples:
- When changeset A is replaced by changeset A', one marker is stored:
(A, (A',))
- When changesets A and B are folded into a new changeset C, two markers are
stored:
(A, (C,)) and (B, (C,))
- When changeset A is simply "pruned" from the graph, a marker is created:
(A, ())
- When changeset A is split into B and C, a single marker is used:
    (A, (B, C))
We use a single marker to distinguish the "split" case from the "divergence"
case. If two independent operations rewrite the same changeset A into A' and
A'', we have an error case: divergent rewriting. We can detect it because
two markers will be created independently:
(A, (B,)) and (A, (C,))
Format
------
Markers are stored in an append-only file stored in
'.hg/store/obsstore'.
The file starts with a version header:
- 1 unsigned byte: version number, starting at zero.
The header is followed by the markers. The marker format depends on the version. See
comment associated with each format for details.
"""
import struct
import util, base85, node, parsers
import phases
from i18n import _
_pack = struct.pack
_unpack = struct.unpack
_calcsize = struct.calcsize
propertycache = util.propertycache
# the obsolete feature is not mature enough to be enabled by default.
# you have to rely on third party extension extension to enable this.
_enabled = False
# Options for obsolescence
createmarkersopt = 'createmarkers'
allowunstableopt = 'allowunstable'
exchangeopt = 'exchange'
### obsolescence marker flag
## bumpedfix flag
#
# When a changeset A' succeed to a changeset A which became public, we call A'
# "bumped" because it's a successors of a public changesets
#
# o A' (bumped)
# |`:
# | o A
# |/
# o Z
#
# The way to solve this situation is to create a new changeset Ad as a child
# of A. This changeset has the same content as A'. So the diff from A to A'
# is the same as the diff from A to Ad. Ad is marked as a successor of A'
#
# o Ad
# |`:
# | x A'
# |'|
# o | A
# |/
# o Z
#
# But by transitivity Ad is also a successors of A. To avoid having Ad marked
# as bumped too, we add the `bumpedfix` flag to the marker. <A', (Ad,)>.
# This flag means that the successors express the changes between the public and
# bumped version and fix the situation, breaking the transitivity of
# "bumped" here.
bumpedfix = 1
usingsha256 = 2
## Parsing and writing of version "0"
#
# The header is followed by the markers. Each marker is made of:
#
# - 1 uint8 : number of new changesets "N", can be zero.
#
# - 1 uint32: metadata size "M" in bytes.
#
# - 1 byte: a bit field. It is reserved for flags used in common
# obsolete marker operations, to avoid repeated decoding of metadata
# entries.
#
# - 20 bytes: obsoleted changeset identifier.
#
# - N*20 bytes: new changesets identifiers.
#
# - M bytes: metadata as a sequence of nul-terminated strings. Each
# string contains a key and a value, separated by a colon ':', without
# additional encoding. Keys cannot contain '\0' or ':' and values
# cannot contain '\0'.
_fm0version = 0
_fm0fixed = '>BIB20s'
_fm0node = '20s'
_fm0fsize = _calcsize(_fm0fixed)
_fm0fnodesize = _calcsize(_fm0node)
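# With the '>BIB20s' layout above the fixed part is 26 bytes:
# 1 (number of successors) + 4 (metadata size) + 1 (flags) + 20 (precursor node).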
def _fm0readmarkers(data, off):
# Loop on markers
l = len(data)
while off + _fm0fsize <= l:
# read fixed part
cur = data[off:off + _fm0fsize]
off += _fm0fsize
numsuc, mdsize, flags, pre = _unpack(_fm0fixed, cur)
# read replacement
sucs = ()
if numsuc:
s = (_fm0fnodesize * numsuc)
cur = data[off:off + s]
sucs = _unpack(_fm0node * numsuc, cur)
off += s
# read metadata
# (metadata will be decoded on demand)
metadata = data[off:off + mdsize]
if len(metadata) != mdsize:
raise util.Abort(_('parsing obsolete marker: metadata is too '
'short, %d bytes expected, got %d')
% (mdsize, len(metadata)))
off += mdsize
metadata = _fm0decodemeta(metadata)
try:
when, offset = metadata.pop('date', '0 0').split(' ')
date = float(when), int(offset)
except ValueError:
date = (0., 0)
parents = None
if 'p2' in metadata:
parents = (metadata.pop('p1', None), metadata.pop('p2', None))
elif 'p1' in metadata:
parents = (metadata.pop('p1', None),)
elif 'p0' in metadata:
parents = ()
if parents is not None:
try:
parents = tuple(node.bin(p) for p in parents)
# if parent content is not a nodeid, drop the data
for p in parents:
if len(p) != 20:
parents = None
break
except TypeError:
# if content cannot be translated to nodeid drop the data.
parents = None
metadata = tuple(sorted(metadata.iteritems()))
yield (pre, sucs, flags, metadata, date, parents)
def _fm0encodeonemarker(marker):
pre, sucs, flags, metadata, date, parents = marker
if flags & usingsha256:
raise util.Abort(_('cannot handle sha256 with old obsstore format'))
metadata = dict(metadata)
time, tz = date
metadata['date'] = '%r %i' % (time, tz)
if parents is not None:
if not parents:
# mark that we explicitly recorded no parents
metadata['p0'] = ''
for i, p in enumerate(parents):
metadata['p%i' % (i + 1)] = node.hex(p)
metadata = _fm0encodemeta(metadata)
numsuc = len(sucs)
format = _fm0fixed + (_fm0node * numsuc)
data = [numsuc, len(metadata), flags, pre]
data.extend(sucs)
return _pack(format, *data) + metadata
def _fm0encodemeta(meta):
"""Return encoded metadata string to string mapping.
Assume no ':' in key and no '\0' in both key and value."""
for key, value in meta.iteritems():
if ':' in key or '\0' in key:
raise ValueError("':' and '\0' are forbidden in metadata key'")
if '\0' in value:
raise ValueError("':' is forbidden in metadata value'")
return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])
def _fm0decodemeta(data):
"""Return string to string dictionary from encoded version."""
d = {}
for l in data.split('\0'):
if l:
key, value = l.split(':')
d[key] = value
return d
## Parsing and writing of version "1"
#
# The header is followed by the markers. Each marker is made of:
#
# - uint32: total size of the marker (including this field)
#
# - float64: date in seconds since epoch
#
# - int16: timezone offset in minutes
#
# - uint16: a bit field. It is reserved for flags used in common
# obsolete marker operations, to avoid repeated decoding of metadata
# entries.
#
# - uint8: number of successors "N", can be zero.
#
# - uint8: number of parents "P", can be zero.
#
# 0: parents data stored but no parent,
# 1: one parent stored,
# 2: two parents stored,
# 3: no parent data stored
#
# - uint8: number of metadata entries M
#
# - 20 or 32 bytes: precursor changeset identifier.
#
# - N*(20 or 32) bytes: successors changesets identifiers.
#
# - P*(20 or 32) bytes: parents of the precursors changesets.
#
# - M*(uint8, uint8): size of all metadata entries (key and value)
#
# - remaining bytes: the metadata, each (key, value) pair after the other.
_fm1version = 1
_fm1fixed = '>IdhHBBB20s'
_fm1nodesha1 = '20s'
_fm1nodesha256 = '32s'
_fm1nodesha1size = _calcsize(_fm1nodesha1)
_fm1nodesha256size = _calcsize(_fm1nodesha256)
_fm1fsize = _calcsize(_fm1fixed)
_fm1parentnone = 3
_fm1parentshift = 14
_fm1parentmask = (_fm1parentnone << _fm1parentshift)
_fm1metapair = 'BB'
_fm1metapairsize = _calcsize('BB')
def _fm1purereadmarkers(data, off):
# make some global constants local for performance
noneflag = _fm1parentnone
sha2flag = usingsha256
sha1size = _fm1nodesha1size
sha2size = _fm1nodesha256size
sha1fmt = _fm1nodesha1
sha2fmt = _fm1nodesha256
metasize = _fm1metapairsize
metafmt = _fm1metapair
fsize = _fm1fsize
unpack = _unpack
# Loop on markers
stop = len(data) - _fm1fsize
ufixed = util.unpacker(_fm1fixed)
while off <= stop:
# read fixed part
o1 = off + fsize
t, secs, tz, flags, numsuc, numpar, nummeta, prec = ufixed(data[off:o1])
if flags & sha2flag:
# FIXME: prec was read as a SHA1, needs to be amended
# read 0 or more successors
if numsuc == 1:
o2 = o1 + sha2size
sucs = (data[o1:o2],)
else:
o2 = o1 + sha2size * numsuc
sucs = unpack(sha2fmt * numsuc, data[o1:o2])
# read parents
if numpar == noneflag:
o3 = o2
parents = None
elif numpar == 1:
o3 = o2 + sha2size
parents = (data[o2:o3],)
else:
o3 = o2 + sha2size * numpar
parents = unpack(sha2fmt * numpar, data[o2:o3])
else:
# read 0 or more successors
if numsuc == 1:
o2 = o1 + sha1size
sucs = (data[o1:o2],)
else:
o2 = o1 + sha1size * numsuc
sucs = unpack(sha1fmt * numsuc, data[o1:o2])
# read parents
if numpar == noneflag:
o3 = o2
parents = None
elif numpar == 1:
o3 = o2 + sha1size
parents = (data[o2:o3],)
else:
o3 = o2 + sha1size * numpar
parents = unpack(sha1fmt * numpar, data[o2:o3])
# read metadata
off = o3 + metasize * nummeta
metapairsize = unpack('>' + (metafmt * nummeta), data[o3:off])
metadata = []
for idx in xrange(0, len(metapairsize), 2):
o1 = off + metapairsize[idx]
o2 = o1 + metapairsize[idx + 1]
metadata.append((data[off:o1], data[o1:o2]))
off = o2
yield (prec, sucs, flags, tuple(metadata), (secs, tz * 60), parents)
def _fm1encodeonemarker(marker):
pre, sucs, flags, metadata, date, parents = marker
# determine node size
_fm1node = _fm1nodesha1
if flags & usingsha256:
_fm1node = _fm1nodesha256
numsuc = len(sucs)
numextranodes = numsuc
if parents is None:
numpar = _fm1parentnone
else:
numpar = len(parents)
numextranodes += numpar
formatnodes = _fm1node * numextranodes
formatmeta = _fm1metapair * len(metadata)
format = _fm1fixed + formatnodes + formatmeta
# tz is stored in minutes so we divide by 60
tz = date[1]//60
data = [None, date[0], tz, flags, numsuc, numpar, len(metadata), pre]
data.extend(sucs)
if parents is not None:
data.extend(parents)
totalsize = _calcsize(format)
for key, value in metadata:
lk = len(key)
lv = len(value)
data.append(lk)
data.append(lv)
totalsize += lk + lv
data[0] = totalsize
data = [_pack(format, *data)]
for key, value in metadata:
data.append(key)
data.append(value)
return ''.join(data)
def _fm1readmarkers(data, off):
native = getattr(parsers, 'fm1readmarkers', None)
if not native:
return _fm1purereadmarkers(data, off)
stop = len(data) - _fm1fsize
return native(data, off, stop)
# mapping to read/write various marker formats
# <version> -> (decoder, encoder)
formats = {_fm0version: (_fm0readmarkers, _fm0encodeonemarker),
_fm1version: (_fm1readmarkers, _fm1encodeonemarker)}
@util.nogc
def _readmarkers(data):
"""Read and enumerate markers from raw data"""
off = 0
diskversion = _unpack('>B', data[off:off + 1])[0]
off += 1
if diskversion not in formats:
raise util.Abort(_('parsing obsolete marker: unknown version %r')
% diskversion)
return diskversion, formats[diskversion][0](data, off)
def encodemarkers(markers, addheader=False, version=_fm0version):
# Kept separate from flushmarkers(), it will be reused for
# markers exchange.
encodeone = formats[version][1]
if addheader:
yield _pack('>B', version)
for marker in markers:
yield encodeone(marker)
class marker(object):
"""Wrap obsolete marker raw data"""
def __init__(self, repo, data):
# the repo argument will be used to create changectx in later version
self._repo = repo
self._data = data
self._decodedmeta = None
def __hash__(self):
return hash(self._data)
def __eq__(self, other):
if type(other) != type(self):
return False
return self._data == other._data
def precnode(self):
"""Precursor changeset node identifier"""
return self._data[0]
def succnodes(self):
"""List of successor changesets node identifiers"""
return self._data[1]
def parentnodes(self):
"""Parents of the precursors (None if not recorded)"""
return self._data[5]
def metadata(self):
"""Decoded metadata dictionary"""
return dict(self._data[3])
def date(self):
"""Creation date as (unixtime, offset)"""
return self._data[4]
def flags(self):
"""The flags field of the marker"""
return self._data[2]
@util.nogc
def _addsuccessors(successors, markers):
for mark in markers:
successors.setdefault(mark[0], set()).add(mark)
@util.nogc
def _addprecursors(precursors, markers):
for mark in markers:
for suc in mark[1]:
precursors.setdefault(suc, set()).add(mark)
@util.nogc
def _addchildren(children, markers):
for mark in markers:
parents = mark[5]
if parents is not None:
for p in parents:
children.setdefault(p, set()).add(mark)
def _checkinvalidmarkers(markers):
"""search for marker with invalid data and raise error if needed
Exist as a separated function to allow the evolve extension for a more
subtle handling.
"""
for mark in markers:
if node.nullid in mark[1]:
raise util.Abort(_('bad obsolescence marker detected: '
'invalid successors nullid'))
class obsstore(object):
"""Store obsolete markers
    Markers can be accessed with three mappings:
    - precursors[x] -> set(markers on precursors edges of x)
    - successors[x] -> set(markers on successors edges of x)
    - children[x] -> set(markers on precursors edges of children(x))
"""
fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
# prec: nodeid, precursor changesets
# succs: tuple of nodeid, successor changesets (0-N length)
# flag: integer, flag field carrying modifier for the markers (see doc)
# meta: binary blob, encoded metadata dictionary
# date: (float, int) tuple, date of marker creation
# parents: (tuple of nodeid) or None, parents of precursors
# None is used when no data has been recorded
def __init__(self, sopener, defaultformat=_fm1version, readonly=False):
# caches for various obsolescence related cache
self.caches = {}
self._all = []
self.sopener = sopener
data = sopener.tryread('obsstore')
self._version = defaultformat
self._readonly = readonly
if data:
self._version, markers = _readmarkers(data)
self._addmarkers(markers)
def __iter__(self):
return iter(self._all)
def __len__(self):
return len(self._all)
def __nonzero__(self):
return bool(self._all)
def create(self, transaction, prec, succs=(), flag=0, parents=None,
date=None, metadata=None):
"""obsolete: add a new obsolete marker
* ensuring it is hashable
* check mandatory metadata
* encode metadata
        If you are a human writing code creating markers you want to use the
        `createmarkers` function in this module instead.
        Return True if a new marker has been added, False if the marker
        already existed (no op).
"""
if metadata is None:
metadata = {}
if date is None:
if 'date' in metadata:
# as a courtesy for out-of-tree extensions
date = util.parsedate(metadata.pop('date'))
else:
date = util.makedate()
if len(prec) != 20:
raise ValueError(prec)
for succ in succs:
if len(succ) != 20:
raise ValueError(succ)
if prec in succs:
raise ValueError(_('in-marker cycle with %s') % node.hex(prec))
metadata = tuple(sorted(metadata.iteritems()))
marker = (str(prec), tuple(succs), int(flag), metadata, date, parents)
return bool(self.add(transaction, [marker]))
def add(self, transaction, markers):
"""Add new markers to the store
        Takes care of filtering out duplicates.
        Returns the number of new markers."""
if self._readonly:
raise util.Abort('creating obsolete markers is not enabled on this '
'repo')
known = set(self._all)
new = []
for m in markers:
if m not in known:
known.add(m)
new.append(m)
if new:
f = self.sopener('obsstore', 'ab')
try:
offset = f.tell()
transaction.add('obsstore', offset)
# offset == 0: new file - add the version header
for bytes in encodemarkers(new, offset == 0, self._version):
f.write(bytes)
finally:
# XXX: f.close() == filecache invalidation == obsstore rebuilt.
# call 'filecacheentry.refresh()' here
f.close()
self._addmarkers(new)
# new marker *may* have changed several set. invalidate the cache.
self.caches.clear()
# records the number of new markers for the transaction hooks
previous = int(transaction.hookargs.get('new_obsmarkers', '0'))
transaction.hookargs['new_obsmarkers'] = str(previous + len(new))
return len(new)
def mergemarkers(self, transaction, data):
"""merge a binary stream of markers inside the obsstore
Returns the number of new markers added."""
version, markers = _readmarkers(data)
return self.add(transaction, markers)
@propertycache
def successors(self):
successors = {}
_addsuccessors(successors, self._all)
return successors
@propertycache
def precursors(self):
precursors = {}
_addprecursors(precursors, self._all)
return precursors
@propertycache
def children(self):
children = {}
_addchildren(children, self._all)
return children
def _cached(self, attr):
return attr in self.__dict__
def _addmarkers(self, markers):
markers = list(markers) # to allow repeated iteration
self._all.extend(markers)
if self._cached('successors'):
_addsuccessors(self.successors, markers)
if self._cached('precursors'):
_addprecursors(self.precursors, markers)
if self._cached('children'):
_addchildren(self.children, markers)
_checkinvalidmarkers(markers)
def relevantmarkers(self, nodes):
"""return a set of all obsolescence markers relevant to a set of nodes.
"relevant" to a set of nodes mean:
- marker that use this changeset as successor
- prune marker of direct children on this changeset
- recursive application of the two rules on precursors of these markers
It is a set so you cannot rely on order."""
pendingnodes = set(nodes)
seenmarkers = set()
seennodes = set(pendingnodes)
precursorsmarkers = self.precursors
children = self.children
while pendingnodes:
direct = set()
for current in pendingnodes:
direct.update(precursorsmarkers.get(current, ()))
pruned = [m for m in children.get(current, ()) if not m[1]]
direct.update(pruned)
direct -= seenmarkers
pendingnodes = set([m[0] for m in direct])
seenmarkers |= direct
pendingnodes -= seennodes
seennodes |= pendingnodes
return seenmarkers
def commonversion(versions):
"""Return the newest version listed in both versions and our local formats.
Returns None if no common version exists.
"""
versions.sort(reverse=True)
# search for highest version known on both side
for v in versions:
if v in formats:
return v
return None
# arbitrary picked to fit into 8K limit from HTTP server
# you have to take in account:
# - the version header
# - the base85 encoding
_maxpayload = 5300
def _pushkeyescape(markers):
"""encode markers into a dict suitable for pushkey exchange
- binary data is base85 encoded
- split in chunks smaller than 5300 bytes"""
keys = {}
parts = []
currentlen = _maxpayload * 2 # ensure we create a new part
for marker in markers:
nextdata = _fm0encodeonemarker(marker)
if (len(nextdata) + currentlen > _maxpayload):
currentpart = []
currentlen = 0
parts.append(currentpart)
currentpart.append(nextdata)
currentlen += len(nextdata)
for idx, part in enumerate(reversed(parts)):
data = ''.join([_pack('>B', _fm0version)] + part)
keys['dump%i' % idx] = base85.b85encode(data)
return keys
def listmarkers(repo):
"""List markers over pushkey"""
if not repo.obsstore:
return {}
return _pushkeyescape(repo.obsstore)
def pushmarker(repo, key, old, new):
"""Push markers over pushkey"""
if not key.startswith('dump'):
repo.ui.warn(_('unknown key: %r') % key)
return 0
if old:
repo.ui.warn(_('unexpected old value for %r') % key)
return 0
data = base85.b85decode(new)
lock = repo.lock()
try:
tr = repo.transaction('pushkey: obsolete markers')
try:
repo.obsstore.mergemarkers(tr, data)
tr.close()
return 1
finally:
tr.release()
finally:
lock.release()
def getmarkers(repo, nodes=None):
"""returns markers known in a repository
    If <nodes> is specified, only markers "relevant" to those nodes are
returned"""
if nodes is None:
rawmarkers = repo.obsstore
else:
rawmarkers = repo.obsstore.relevantmarkers(nodes)
for markerdata in rawmarkers:
yield marker(repo, markerdata)
def relevantmarkers(repo, node):
"""all obsolete markers relevant to some revision"""
for markerdata in repo.obsstore.relevantmarkers(node):
yield marker(repo, markerdata)
def precursormarkers(ctx):
"""obsolete marker marking this changeset as a successors"""
for data in ctx.repo().obsstore.precursors.get(ctx.node(), ()):
yield marker(ctx.repo(), data)
def successormarkers(ctx):
"""obsolete marker making this changeset obsolete"""
for data in ctx.repo().obsstore.successors.get(ctx.node(), ()):
yield marker(ctx.repo(), data)
def allsuccessors(obsstore, nodes, ignoreflags=0):
"""Yield node for every successor of <nodes>.
Some successors may be unknown locally.
This is a linear yield unsuited to detecting split changesets. It includes
initial nodes too."""
remaining = set(nodes)
seen = set(remaining)
while remaining:
current = remaining.pop()
yield current
for mark in obsstore.successors.get(current, ()):
# ignore marker flagged with specified flag
if mark[2] & ignoreflags:
continue
for suc in mark[1]:
if suc not in seen:
seen.add(suc)
remaining.add(suc)
def allprecursors(obsstore, nodes, ignoreflags=0):
"""Yield node for every precursors of <nodes>.
Some precursors may be unknown locally.
This is a linear yield unsuited to detecting folded changesets. It includes
initial nodes too."""
remaining = set(nodes)
seen = set(remaining)
while remaining:
current = remaining.pop()
yield current
for mark in obsstore.precursors.get(current, ()):
# ignore marker flagged with specified flag
if mark[2] & ignoreflags:
continue
suc = mark[0]
if suc not in seen:
seen.add(suc)
remaining.add(suc)
def foreground(repo, nodes):
"""return all nodes in the "foreground" of other node
The foreground of a revision is anything reachable using parent -> children
or precursor -> successor relation. It is very similar to "descendant" but
augmented with obsolescence information.
    Beware that obsolescence cycles may arise in complex situations.
"""
repo = repo.unfiltered()
foreground = set(repo.set('%ln::', nodes))
if repo.obsstore:
# We only need this complicated logic if there is obsolescence
# XXX will probably deserve an optimised revset.
nm = repo.changelog.nodemap
plen = -1
# compute the whole set of successors or descendants
while len(foreground) != plen:
plen = len(foreground)
succs = set(c.node() for c in foreground)
mutable = [c.node() for c in foreground if c.mutable()]
succs.update(allsuccessors(repo.obsstore, mutable))
known = (n for n in succs if n in nm)
foreground = set(repo.set('%ln::', known))
return set(c.node() for c in foreground)
def successorssets(repo, initialnode, cache=None):
"""Return all set of successors of initial nodes
    The successors set of a changeset A is a group of revisions that succeed
A. It succeeds A as a consistent whole, each revision being only a partial
replacement. The successors set contains non-obsolete changesets only.
This function returns the full list of successor sets which is why it
returns a list of tuples and not just a single tuple. Each tuple is a valid
    successors set. Note that (A,) may be a valid successors set for changeset A
(see below).
In most cases, a changeset A will have a single element (e.g. the changeset
A is replaced by A') in its successors set. Though, it is also common for a
changeset A to have no elements in its successor set (e.g. the changeset
has been pruned). Therefore, the returned list of successors sets will be
[(A',)] or [], respectively.
When a changeset A is split into A' and B', however, it will result in a
successors set containing more than a single element, i.e. [(A',B')].
Divergent changesets will result in multiple successors sets, i.e. [(A',),
(A'')].
If a changeset A is not obsolete, then it will conceptually have no
successors set. To distinguish this from a pruned changeset, the successor
set will only contain itself, i.e. [(A,)].
Finally, successors unknown locally are considered to be pruned (obsoleted
without any successors).
The optional `cache` parameter is a dictionary that may contain precomputed
successors sets. It is meant to reuse the computation of a previous call to
`successorssets` when multiple calls are made at the same time. The cache
    dictionary is updated in place. The caller is responsible for its life
    span. Code that makes multiple calls to `successorssets` *must* use this
    cache mechanism or suffer terrible performance.
"""
succmarkers = repo.obsstore.successors
# Stack of nodes we search successors sets for
toproceed = [initialnode]
# set version of above list for fast loop detection
# element added to "toproceed" must be added here
stackedset = set(toproceed)
if cache is None:
cache = {}
# This while loop is the flattened version of a recursive search for
# successors sets
#
# def successorssets(x):
# successors = directsuccessors(x)
# ss = [[]]
# for succ in directsuccessors(x):
# # product as in itertools cartesian product
# ss = product(ss, successorssets(succ))
# return ss
#
# But we can not use plain recursive calls here:
# - that would blow the python call stack
# - obsolescence markers may have cycles, we need to handle them.
#
# The `toproceed` list act as our call stack. Every node we search
# successors set for are stacked there.
#
# The `stackedset` is set version of this stack used to check if a node is
# already stacked. This check is used to detect cycles and prevent infinite
# loop.
#
# successors set of all nodes are stored in the `cache` dictionary.
#
# After this while loop ends we use the cache to return the successors sets
# for the node requested by the caller.
while toproceed:
# Every iteration tries to compute the successors sets of the topmost
# node of the stack: CURRENT.
#
# There are four possible outcomes:
#
# 1) We already know the successors sets of CURRENT:
# -> mission accomplished, pop it from the stack.
# 2) Node is not obsolete:
# -> the node is its own successors sets. Add it to the cache.
# 3) We do not know successors set of direct successors of CURRENT:
# -> We add those successors to the stack.
# 4) We know successors sets of all direct successors of CURRENT:
# -> We can compute CURRENT successors set and add it to the
# cache.
#
current = toproceed[-1]
if current in cache:
# case (1): We already know the successors sets
stackedset.remove(toproceed.pop())
elif current not in succmarkers:
# case (2): The node is not obsolete.
if current in repo:
                # We have a valid last successor.
cache[current] = [(current,)]
else:
# Final obsolete version is unknown locally.
                # Do not count that as a valid successor
cache[current] = []
else:
# cases (3) and (4)
#
# We proceed in two phases. Phase 1 aims to distinguish case (3)
# from case (4):
#
# For each direct successors of CURRENT, we check whether its
# successors sets are known. If they are not, we stack the
# unknown node and proceed to the next iteration of the while
# loop. (case 3)
#
# During this step, we may detect obsolescence cycles: a node
# with unknown successors sets but already in the call stack.
            # In such a situation, we arbitrarily set the successors sets of
# the node to nothing (node pruned) to break the cycle.
#
# If no break was encountered we proceed to phase 2.
#
# Phase 2 computes successors sets of CURRENT (case 4); see details
# in phase 2 itself.
#
# Note the two levels of iteration in each phase.
# - The first one handles obsolescence markers using CURRENT as
# precursor (successors markers of CURRENT).
#
            #   Having multiple entries here means divergence.
#
# - The second one handles successors defined in each marker.
#
# Having none means pruned node, multiple successors means split,
# single successors are standard replacement.
#
for mark in sorted(succmarkers[current]):
for suc in mark[1]:
if suc not in cache:
if suc in stackedset:
# cycle breaking
cache[suc] = []
else:
# case (3) If we have not computed successors sets
# of one of those successors we add it to the
# `toproceed` stack and stop all work for this
# iteration.
toproceed.append(suc)
stackedset.add(suc)
break
else:
continue
break
else:
# case (4): we know all successors sets of all direct
# successors
#
# Successors set contributed by each marker depends on the
                #   successors sets of all its "successors" nodes.
#
# Each different marker is a divergence in the obsolescence
# history. It contributes successors sets distinct from other
# markers.
#
# Within a marker, a successor may have divergent successors
# sets. In such a case, the marker will contribute multiple
# divergent successors sets. If multiple successors have
# divergent successors sets, a Cartesian product is used.
#
# At the end we post-process successors sets to remove
                # duplicated entries and successors sets that are strict subsets of
# another one.
succssets = []
for mark in sorted(succmarkers[current]):
# successors sets contributed by this marker
markss = [[]]
for suc in mark[1]:
                        # cartesian product with previous successors
productresult = []
for prefix in markss:
for suffix in cache[suc]:
newss = list(prefix)
for part in suffix:
                                    # do not duplicate entries in successors set
# first entry wins.
if part not in newss:
newss.append(part)
productresult.append(newss)
markss = productresult
succssets.extend(markss)
# remove duplicated and subset
seen = []
final = []
candidate = sorted(((set(s), s) for s in succssets if s),
key=lambda x: len(x[1]), reverse=True)
for setversion, listversion in candidate:
for seenset in seen:
if setversion.issubset(seenset):
break
else:
final.append(listversion)
seen.append(setversion)
final.reverse() # put small successors set first
cache[current] = final
return cache[initialnode]
def _knownrevs(repo, nodes):
"""yield revision numbers of known nodes passed in parameters
Unknown revisions are silently ignored."""
torev = repo.changelog.nodemap.get
for n in nodes:
rev = torev(n)
if rev is not None:
yield rev
# mapping of 'set-name' -> <function to compute this set>
cachefuncs = {}
def cachefor(name):
"""Decorator to register a function as computing the cache for a set"""
def decorator(func):
assert name not in cachefuncs
cachefuncs[name] = func
return func
return decorator
def getrevs(repo, name):
"""Return the set of revision that belong to the <name> set
Such access may compute the set and cache it for future use"""
repo = repo.unfiltered()
if not repo.obsstore:
return frozenset()
if name not in repo.obsstore.caches:
repo.obsstore.caches[name] = cachefuncs[name](repo)
return repo.obsstore.caches[name]
# To be simple we need to invalidate obsolescence cache when:
#
# - a new changeset is added
# - public phase is changed
# - obsolescence markers are added
# - strip is used on a repo
def clearobscaches(repo):
"""Remove all obsolescence related cache from a repo
    This removes all caches in obsstore if the obsstore already exists on the
    repo.
    (We could be smarter here given the exact event that triggered the cache
    clearing)"""
    # only clear cache if there is obsstore data in this repo
if 'obsstore' in repo._filecache:
repo.obsstore.caches.clear()
@cachefor('obsolete')
def _computeobsoleteset(repo):
"""the set of obsolete revisions"""
obs = set()
getrev = repo.changelog.nodemap.get
getphase = repo._phasecache.phase
for n in repo.obsstore.successors:
rev = getrev(n)
if rev is not None and getphase(repo, rev):
obs.add(rev)
return obs
@cachefor('unstable')
def _computeunstableset(repo):
"""the set of non obsolete revisions with obsolete parents"""
# revset is not efficient enough here
# we do (obsolete()::) - obsolete() by hand
obs = getrevs(repo, 'obsolete')
if not obs:
return set()
cl = repo.changelog
return set(r for r in cl.descendants(obs) if r not in obs)
@cachefor('suspended')
def _computesuspendedset(repo):
"""the set of obsolete parents with non obsolete descendants"""
suspended = repo.changelog.ancestors(getrevs(repo, 'unstable'))
return set(r for r in getrevs(repo, 'obsolete') if r in suspended)
@cachefor('extinct')
def _computeextinctset(repo):
"""the set of obsolete parents without non obsolete descendants"""
return getrevs(repo, 'obsolete') - getrevs(repo, 'suspended')
@cachefor('bumped')
def _computebumpedset(repo):
"""the set of revs trying to obsolete public revisions"""
bumped = set()
# util function (avoid attribute lookup in the loop)
phase = repo._phasecache.phase # would be faster to grab the full list
public = phases.public
cl = repo.changelog
torev = cl.nodemap.get
obs = getrevs(repo, 'obsolete')
for rev in repo:
        # We only evaluate mutable, non-obsolete revisions
if (public < phase(repo, rev)) and (rev not in obs):
node = cl.node(rev)
            # (future) A cache of precursors may be worth it if split is very common
for pnode in allprecursors(repo.obsstore, [node],
ignoreflags=bumpedfix):
prev = torev(pnode) # unfiltered! but so is phasecache
if (prev is not None) and (phase(repo, prev) <= public):
                    # we have a public precursor
bumped.add(rev)
break # Next draft!
return bumped
@cachefor('divergent')
def _computedivergentset(repo):
"""the set of rev that compete to be the final successors of some revision.
"""
divergent = set()
obsstore = repo.obsstore
newermap = {}
for ctx in repo.set('(not public()) - obsolete()'):
mark = obsstore.precursors.get(ctx.node(), ())
toprocess = set(mark)
seen = set()
while toprocess:
prec = toprocess.pop()[0]
if prec in seen:
continue # emergency cycle hanging prevention
seen.add(prec)
if prec not in newermap:
successorssets(repo, prec, newermap)
newer = [n for n in newermap[prec] if n]
if len(newer) > 1:
divergent.add(ctx.rev())
break
toprocess.update(obsstore.precursors.get(prec, ()))
return divergent
def createmarkers(repo, relations, flag=0, date=None, metadata=None):
"""Add obsolete markers between changesets in a repo
<relations> must be an iterable of (<old>, (<new>, ...)[,{metadata}])
    tuples. `old` and `news` are changectx. metadata is an optional dictionary
    containing metadata for this marker only. It is merged with the global
    metadata specified through the `metadata` argument of this function.
Trying to obsolete a public changeset will raise an exception.
Current user and date are used except if specified otherwise in the
metadata attribute.
This function operates within a transaction of its own, but does
not take any lock on the repo.
"""
# prepare metadata
if metadata is None:
metadata = {}
if 'user' not in metadata:
metadata['user'] = repo.ui.username()
tr = repo.transaction('add-obsolescence-marker')
try:
for rel in relations:
prec = rel[0]
sucs = rel[1]
localmetadata = metadata.copy()
if 2 < len(rel):
localmetadata.update(rel[2])
if not prec.mutable():
raise util.Abort("cannot obsolete immutable changeset: %s"
% prec)
nprec = prec.node()
nsucs = tuple(s.node() for s in sucs)
npare = None
if not nsucs:
npare = tuple(p.node() for p in prec.parents())
if nprec in nsucs:
raise util.Abort("changeset %s cannot obsolete itself" % prec)
repo.obsstore.create(tr, nprec, nsucs, flag, parents=npare,
date=date, metadata=localmetadata)
repo.filteredrevcache.clear()
tr.close()
finally:
tr.release()
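# A hedged usage sketch for createmarkers (the changectx objects are
# assumptions supplied by the caller, and the metadata key is illustrative):
#
#     createmarkers(repo, [(oldctx, (newctx,))],
#                   metadata={'note': 'rewritten by some extension'})
#
# Each relation marks `oldctx` as superseded by `newctx`; an empty successors
# tuple would record a prune instead.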
def isenabled(repo, option):
"""Returns True if the given repository has the given obsolete option
enabled.
"""
result = set(repo.ui.configlist('experimental', 'evolution'))
if 'all' in result:
return True
# For migration purposes, temporarily return true if the config hasn't been
# set but _enabled is true.
if len(result) == 0 and _enabled:
return True
# createmarkers must be enabled if other options are enabled
if ((allowunstableopt in result or exchangeopt in result) and
not createmarkersopt in result):
raise util.Abort(_("'createmarkers' obsolete option must be enabled "
"if other obsolete options are enabled"))
return option in result
| hekra01/mercurial | mercurial/obsolete.py | Python | gpl-2.0 | 44,237 |
from buildbot import steps
from proteibb.core.builder.step import Step
class ProcessSources(Step):
def __init__(self, project, file_filter):
# Find all files for compilation step.
# Make directories.
# Use different steps?
self._project = project
def setup(self, work_dir, *args, **kwargs):
pass
def data(self):
pass
def step(self):
pass
| simonenkos/proteibb | proteibb/core/builder/steps/process_sources.py | Python | gpl-2.0 | 416 |
#!/usr/bin/env python
# Copyright(c)2012-2013 Internet Archive. Software license AGPL version 3.
#
# This file is part of the `surt` python package.
#
# surt is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# surt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with surt. If not, see <http://www.gnu.org/licenses/>.
#
# The surt source is hosted at https://github.com/internetarchive/surt
"""This is a python port of the WaybackURLKeyMaker.java class:
http://archive-access.svn.sourceforge.net/viewvc/archive-access/trunk/archive-access/projects/archive-commons/src/main/java/org/archive/url/WaybackURLKeyMaker.java?view=markup
"""
from __future__ import absolute_import
from surt.handyurl import handyurl
from surt.URLRegexTransformer import hostToSURT
import surt.DefaultIAURLCanonicalizer as DefaultIAURLCanonicalizer
class CompositeCanonicalizer(object):
def __init__(self, canonicalizers):
self.canonicalizers = [
self._normalize(canon) for canon in canonicalizers
]
def __call__(self, hurl, **options):
for canon in self.canonicalizers:
hurl = canon(hurl, **options)
return hurl
@staticmethod
def _normalize(canonicalizer):
if hasattr(canonicalizer, '__call__'):
return canonicalizer
if hasattr(canonicalizer, 'canonicalize'):
return canonicalizer.canonicalize
raise AttributeError('canonicalizer must either be callable or have'
' "canonicalizer" method')
# surt()
#_______________________________________________________________________________
def surt(url, canonicalizer=None, **options):
if isinstance(url, bytes):
return _surt_bytes(url, canonicalizer, **options)
else:
if url is not None:
url = url.encode('utf-8')
return _surt_bytes(url, canonicalizer, **options).decode('utf-8')
def _surt_bytes(url, canonicalizer, **options):
if not url:
return b"-"
if url.startswith(b"filedesc"):
return url
if canonicalizer is None:
canonicalizer = DefaultIAURLCanonicalizer.canonicalize
else:
if isinstance(canonicalizer, (list, tuple)):
canonicalizer = CompositeCanonicalizer(canonicalizer)
elif (not hasattr(canonicalizer, '__call__') and
hasattr(canonicalizer, 'canonicalize')):
canonicalizer = canonicalizer.canonicalize
options.setdefault('surt', True)
options.setdefault('with_scheme', False)
hurl = canonicalizer(handyurl.parse(url), **options)
return hurl.geturl_bytes(**options)
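# A small usage sketch; the exact canonical form depends on the configured
# canonicalizer, and the output shown assumes the default IA canonicalizer:
#
#     surt("http://www.example.com/some/path?b=2&a=1")
#     # -> something like "com,example)/some/path?a=1&b=2"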
| internetarchive/surt | surt/surt.py | Python | agpl-3.0 | 3,121 |
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.magic.basics.skyregion Contains the SkyRegion class.
# -----------------------------------------------------------------
# Ensure Python 3 functionality
from __future__ import absolute_import, division, print_function
# Import astronomical modules
import pyregion
from astropy.units import Unit
from astropy.coordinates import Angle
# Import the relevant PTS classes and modules
from .vector import Extent
from .skygeometry import SkyCoordinate, SkyLine, SkyCircle, SkyEllipse, SkyRectangle, SkyPolygon
# -----------------------------------------------------------------
class SkyRegion(list):
"""
This class ...
"""
def __init__(self):
"""
This function ...
:return:
"""
# Call the constructor of the base class
super(SkyRegion, self).__init__()
# -----------------------------------------------------------------
@classmethod
def from_file(cls, path, only=None, ignore=None, color=None, ignore_color=None):
"""
This function ...
:param path:
:param only:
:param ignore:
:param color:
:param ignore_color:
:return:
"""
# Create a new sky region
region = cls()
# Open the region file with pyregion and check if its in sky coordinates
_region = pyregion.open(path)
def check_sky_coord(_region):
if [s for s in _region if s.coord_format != "fk5"]: return False
else: return True
if not check_sky_coord(_region): raise ValueError("Region is not in sky coordinates")
# Loop over all shapes in the region
for shape in _region:
# Meta information
meta = {}
if "text" in shape.attr[1]: meta["text"] = shape.attr[1]["text"]
if "color" in shape.attr[1]: meta["color"] = shape.attr[1]["color"]
if "point" in shape.attr[1]: meta["point"] = shape.attr[1]["point"]
# Check the color of the shape
if color is not None and shape.attr[1]["color"] != color: continue
if ignore_color is not None and shape.attr[1]["color"] == ignore_color: continue
# Get the coordinate format of this shape
coord_format = shape.coord_format
# The shape is a point -> SkyCoord
if shape.name == "point":
# Get RA and declination
ra = shape.coord_list[0]
dec = shape.coord_list[1]
# Create sky coordinate
coordinate = SkyCoordinate(ra=ra, dec=dec, unit="deg", frame=coord_format, meta=meta)
new_shape = coordinate
# The shape is a line or vector -> SkyLine
elif shape.name == "line" or shape.name == "vector":
# Get the RA and declination of the two points
ra_1 = shape.coord_list[0]
dec_1 = shape.coord_list[1]
ra_2 = shape.coord_list[2]
dec_2 = shape.coord_list[3]
# Create the sky coordinates
coordinate_1 = SkyCoordinate(ra=ra_1, dec=dec_1, unit="deg", frame=coord_format)
coordinate_2 = SkyCoordinate(ra=ra_2, dec=dec_2, unit="deg", frame=coord_format)
# Create the SkyLine object
line = SkyLine(coordinate_1, coordinate_2, meta=meta)
new_shape = line
# The shape is a circle -> SkyCircle
elif shape.name == "circle":
# Get the RA and declination of the center and the radius
ra_center = shape.coord_list[0]
dec_center = shape.coord_list[1]
radius = shape.coord_list[2] * Unit("deg")
            # Create a sky coordinate for the center
center = SkyCoordinate(ra=ra_center, dec=dec_center, unit="deg", frame=coord_format)
# Create a SkyCircle object and add it to the region
circle = SkyCircle(center, radius, meta=meta)
new_shape = circle
# The shape is an ellipse -> SkyEllipse
elif shape.name == "ellipse":
# Get the RA and declination of the center
ra_center = shape.coord_list[0]
dec_center = shape.coord_list[1]
center = SkyCoordinate(ra=ra_center, dec=dec_center, unit="deg", frame=coord_format)
# Get the radius
x_radius = shape.coord_list[2] * Unit("deg")
y_radius = shape.coord_list[3] * Unit("deg")
radius = Extent(x_radius, y_radius)
# Get the angle
angle = Angle(shape.coord_list[4], "deg")
# Create a SkyEllipse object and add it to the region
ellipse = SkyEllipse(center, radius, angle, meta=meta)
new_shape = ellipse
# The shape is a rectangle -> SkyRectangle
elif shape.name == "box":
# Get the RA and declination of the center
ra_center = shape.coord_list[0]
dec_center = shape.coord_list[1]
center = SkyCoordinate(ra=ra_center, dec=dec_center, unit="deg", frame=coord_format)
# Get the width and height
width = shape.coord_list[2] * Unit("deg")
height = shape.coord_list[3] * Unit("deg")
# Create radius
radius = Extent(0.5 * width, 0.5*height)
# Get the angle
angle = Angle(shape.coord_list[4], "deg")
# Create a SkyRectangle and add it to the region
rectangle = SkyRectangle(center, radius, angle, meta=meta)
new_shape = rectangle
# The shape is a polygon -> SkyPolygon
elif shape.name == "polygon":
number_of_points = 0.5 * len(shape.coord_list)
assert int(number_of_points) == number_of_points
number_of_points = int(number_of_points)
# Create a new SkyPolygon
polygon = SkyPolygon(meta=meta)
# Get the RA and declination of the different points
for i in range(number_of_points):
# Create a new SkyCoordinate object
ra = shape.coord_list[2*i]
dec = shape.coord_list[2*i + 1]
coordinate = SkyCoordinate(ra=ra, dec=dec, unit="deg", frame=coord_format)
# Add the coordinate to the polygon
polygon.add_point(coordinate)
new_shape = polygon
# Unrecognized shape
            else: raise ValueError("Unrecognized shape (should be point, line, vector, circle, ellipse, box or polygon)")
region.append(new_shape)
# Return the new region
return region
# -----------------------------------------------------------------
def append(self, shape):
"""
This function ...
:param shape:
:return:
"""
# Check whether the shape is in sky coordinates
if not shape.__class__.__name__.startswith("Sky"): raise ValueError("Shape must be SkyCoordinate, SkyLine, SkyCircle, SkyEllipse, SkyRectangle, SkyPolygon or SkyComposite")
# Otherwise, add the shape
super(SkyRegion, self).append(shape)
# -----------------------------------------------------------------
def __mul__(self, value):
"""
This function ...
:param value:
:return:
"""
# Create a new region
new_region = SkyRegion()
for shape in self: new_region.append(shape * value)
# Return the new region
return new_region
# -----------------------------------------------------------------
def __truediv__(self, value):
"""
This function ...
:param value:
:return:
"""
        return self.__div__(value)
# -----------------------------------------------------------------
def __div__(self, value):
"""
This function ...
:param value:
:return:
"""
# Create a new region
new_region = SkyRegion()
for ellipse in self: new_region.append(ellipse / value)
# Return the new region
return new_region
# -----------------------------------------------------------------
def to_pixel(self, wcs):
"""
This function ...
:param wcs:
:return:
"""
# Avoid circular import at module level
from .region import Region
# Create a new region
region = Region()
# Fill the new list
for shape in self: region.append(shape.to_pixel(wcs))
# Return region in pixel coordinates
return region
# -----------------------------------------------------------------
def save(self, path):
"""
This function ...
:param path:
:return:
"""
# Create a file
f = open(path, 'w')
# Initialize the region string
print("# Region file format: DS9 version 4.1", file=f)
# Write the coordinate system
print("fk5\n", file=f)
# Loop over all shapes, get string and print it to the region file
for shape in self: print(shape.to_region_string(coordinate_system=False), file=f)
# Close the file
f.close()
# -----------------------------------------------------------------
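# A hedged usage sketch (the file names and the WCS object are assumptions,
# not taken from this module):
#
#     region = SkyRegion.from_file("galaxy.reg", color="green")
#     pixel_region = region.to_pixel(some_wcs)  # needs a WCS the shapes understand
#     region.save("galaxy_checked.reg")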
| Stargrazer82301/CAAPR | CAAPR/CAAPR_AstroMagic/PTS/pts/magic/basics/skyregion.py | Python | mit | 9,929 |
#!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
DEFAULT_SETTINGS = dict(
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'lbattachment',
'lbattachment.tests',
),
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3"
}
},
SILENCED_SYSTEM_CHECKS=["1_7.W001"],
)
def run(command):
if not settings.configured:
settings.configure(**DEFAULT_SETTINGS)
# Compatibility with Django 1.7's stricter initialization
if hasattr(django, 'setup'):
django.setup()
parent = os.path.dirname(os.path.abspath(__file__))
appdir = os.path.join(parent, 'lbattachment')
os.chdir(appdir)
from django.core.management import call_command
params = {}
if command == 'make':
params['locale'] = ['zh-Hans']
call_command('%smessages' % command, **params)
if __name__ == '__main__':
if (len(sys.argv)) < 2 or (sys.argv[1] not in {'make', 'compile'}):
print("Run `translations.py make` or `translations.py compile`.")
sys.exit(1)
run(sys.argv[1])
| vicalloy/django-lbattachment | translations.py | Python | mit | 1,166 |
#! /usr/bin/env python
"""!
-----------------------------------------------------------------------------
File Name : utilities.py
Purpose:
Created: 08-Dec-2015 05:14:14 AEDT
-----------------------------------------------------------------------------
Revision History
-----------------------------------------------------------------------------
S.D.G
"""
__author__ = 'Ben Johnston'
__revision__ = '0.1'
__date__ = '08-Dec-2015 05:14:14 AEDT'
__license__ = 'MPL v2.0'
# LICENSE DETAILS############################################################
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# IMPORTS#####################################################################
from funds_tracker.models import Donation
from django.db.models import Sum
import matplotlib.pyplot as plt
from StringIO import StringIO
import base64
import numpy as np
import pdb
##############################################################################
def get_donation_year_total(party_name, party_year):
return sum(Donation.objects.filter(party=party_name, year=party_year).
values_list('amount', flat=True))
def generate_pie_chart(names, values, colours=None):
plt.figure()
patches, texts, autotexts = plt.pie(values,
labels=names,
autopct='%1.1f%%',
radius=0.8,
wedgeprops={'edgecolor': 'white',
'linewidth': 2},
colors=colours)
for t in texts:
t.set_size('smaller')
t.set_family('Arial')
for t in autotexts:
t.set_size('smaller')
t.set_color('white')
t.set_weight('bold')
chart_buffer = StringIO()
plt.savefig(chart_buffer, bbox_inches='tight', format="png")
return base64.b64encode(chart_buffer.getvalue())
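# Usage sketch for the pie chart helper (party names and amounts are made up):
#
#     img_b64 = generate_pie_chart(['Party A', 'Party B'], [125000.0, 87000.0])
#     # the returned base64 string can be embedded in a template, e.g.
#     # <img src="data:image/png;base64,...">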
def generate_bar_chart(names, values, colour=None, string_buff=True):
plt.figure()
N = len(names)
ind = np.arange(N)
fig, ax = plt.subplots()
ax.bar(ind, values, color='b', align='center', edgecolor='blue')
# ax.bar(ind, values[::-1], color='b', align='center', edgecolor='blue')
disp_names = tuple(map(str, names))
# disp_names = tuple(map(str, names[::-1]))
ax.set_xticks(ind)
ax.set_xticklabels(disp_names, ha='center', minor=False)
ax.axes.get_yaxis().set_visible(False)
for spine in ax.spines.itervalues():
spine.set_visible(False)
for tic in ax.axes.get_xticklines():
tic.set_visible(False)
if string_buff:
chart_buffer = StringIO()
plt.savefig(chart_buffer, bbox_inches='tight', format="png")
plt.close()
return base64.b64encode(chart_buffer.getvalue())
else:
return plt
def generate_bar_charts():
parties = Donation.objects.values_list('party', flat=True).distinct()
for py in parties:
years = Donation.objects.filter(party=py).values_list('year',
flat=True).distinct()
amount = {}
for yr in years:
tmp_amount = Donation.objects.filter(party=py, year=yr).aggregate(Sum('amount'))
amount[int(yr)] = tmp_amount['amount__sum']
chart = generate_bar_chart(amount.keys(), amount.values(),
string_buff=False)
chart.savefig('funds_tracker/static/funds_tracker/test.png')
chart.close()
return
| benjohnston24/partyCoin-interface | funds_tracker/utilities.py | Python | mpl-2.0 | 3,696 |
#!/usr/bin/env python
"""Extend an existing chain of nucleotides."""
# Copyright 2012 Kevin Keating
#
# Licensed under the Educational Community License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.osedu.org/licenses/ECL-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS"
# BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
from coot import add_status_bar_text, clear_pending_picks, imol_refinement_map
from coot import user_defined_click_py as user_defined_click
from selectMapDialog import selectMapDialog
from citationPopup import createCitationPopup
from pseudoMolecule import PseudoMolecule, PseudoMoleculeError
import rotamerize #we only need clearPendingRotamerize, but if we import that function directly into this namespace
#we'll wind up with circular dependencies
from traceGui import TraceGui
from reportInputErrors import reportPDB2Error, reportInsCodeError, STANDARD_BASES, reportModifiedNucError
__menuItem = None #the "Add to terminal nts..." menuItem object
waitingForClicks = False #whether or not we are waiting for the user to pick an atom
def pickAtoms():
"""Prompt the user to pick an atoms to extend a stretch of nucleotides. Alternatively, if
we're already waiting for the user to pick an atom, then cancel the pending pick.
ARGUMENTS:
None
RETURNS:
None
"""
#create the citation pop-up if necessary
createCitationPopup()
#make sure that we're not waiting on a pending rotamerize call
rotamerize.clearPendingRotamerize()
global waitingForClicks
if waitingForClicks:
#if we're already in the middle of an extend chain call, then cancel the pending call
print "Rotamerize cancelled"
add_status_bar_text("Extend chain cancelled")
clear_pending_picks() #tell Coot to stop waiting for the user to click on atoms
__setMenuToExtendChain()
waitingForClicks = False
else:
#if we're not in the middle of a extend chain call, then start one
#make sure that there is a refinement map set
if imol_refinement_map() == -1:
print "No refinement map set for RCrane extend chain"
selectMapDialog(pickAtoms)
return
print "Click on a nucleotide to extend"
add_status_bar_text("Pick a nucleotide [Ctrl Left-mouse rotates the view]...")
__setMenuToCancel()
waitingForClicks = True
user_defined_click(1, extendChainFromAtomSpec)
def extendChainFromAtomSpec(atomSpec):
"""Rotamerize a stretch of nucleotides
ARGUMENTS:
atomSpec - an atom spec specifying the start of the chain extension
RETURNS:
False if there is a problem with the selected nucleotide
None otherwise
"""
    #stop waiting for the user to cancel the extend chain
__setMenuToExtendChain()
global waitingForClicks
waitingForClicks = False
#print "In extendChainFromAtomSpec"
#print "atomSpec =", atomSpec
(mol, chain, resNum, insCode, atom, alt) = atomSpec[1:]
atom = atom.strip()
try:
pseudoMol = PseudoMolecule(createFromMolecule = mol, chain = chain)
except PseudoMoleculeError, err:
        #if creating the PseudoMolecule raises an error, then it means that the molecule we clicked on contains
#anisotropic temperature records and we're using a version of Coot that's older than rev 3631
#so clear_and_update_molecule can't handle anisotropic records
print err
#add_status_bar_text(str(err))
#Since this isn't an intuitive problem (i.e. it's far less obvious for the user than "you clicked on
#two different chains"), we create a dialog box instead of a status bar message
        createRCraneErrorDialog(str(err) + "\n\nChain extension canceled.")
return False
resNumFull = str(resNum) + str(insCode)
resIndex = pseudoMol.resIndex(resNum)
#figure out what direction to extend in
connectedToNext = pseudoMol.connectedToNextFromIndex(resIndex)
connectedToPrev = pseudoMol.connectedToPrevFromIndex(resIndex)
curResIsPhosOnly = pseudoMol.isOnlyPhosGroupFromIndex(resIndex)
nextResIsPhosOnly = False
if connectedToNext:
nextResIsPhosOnly = pseudoMol.isOnlyPhosGroupFromIndex(resIndex+1)
extendDir = None #the direction to extend the chain in
if curResIsPhosOnly:
if connectedToPrev:
#we're building on a 3' phosphate
extendDir = 3
else:
#the selected residue is an isolated phosphate group, so we can't do anything
print "Nucleotide " + resNumFull + " is an isolated phosphate group. Cannot extend."
add_status_bar_text("Nucleotide " + resNumFull + " is an isolated phosphate group. Cannot extend.")
return False
elif nextResIsPhosOnly:
if connectedToPrev:
#we're at the 3' end of the chain and there is a 3' phosphate
extendDir = 3
resIndex += 1 #increment the resIndex so that it points to the real last residue of the segment
curResIsPhosOnly = True
resNumFull = pseudoMol.resNumFull(resIndex) #update the residue name in case we need to use it for later error messages
else:
#we're dealing with a single nt containing a 3' phosphate
#we need to decide direction based on the atom clicked
extendDir = __decideDirectionFromAtom(atom, resNumFull)
if not extendDir: return False #if we couldn't determine a direction from the atom, then cancel
elif connectedToNext and connectedToPrev:
print "Nucleotide " + resNumFull + " is not at the end of a chain"
add_status_bar_text("Nucleotide " + resNumFull + " is not at the end of a chain")
return False
elif connectedToNext:
extendDir = 5
elif connectedToPrev:
extendDir = 3
else:
#we're dealing with a single nt not containing a 3' phosphate
#we need to decide direction based on the atom clicked
extendDir = __decideDirectionFromAtom(atom, resNumFull)
if not extendDir: return False #if we couldn't determine a direction from the atom, then cancel
#make sure that the residue we're going to extend doesn't have an insertion code
# (also check the next and previous residues)
if pseudoMol.resNum(resIndex)[1] != "":
reportInsCodeError(resNumFull)
return False
if connectedToNext and pseudoMol.resNum(resIndex+1)[1] != "":
reportInsCodeError(pseudoMol.resNumFull(resIndex+1))
return False
if connectedToPrev and pseudoMol.resNum(resIndex-1)[1] != "":
reportInsCodeError(pseudoMol.resNumFull(resIndex-1))
return False
#figure out which residue we should be checking for PDB2 naming and missing glycosidic bond atoms
if curResIsPhosOnly:
resIndexToCheck = resIndex - 1
resNumFullToCheck = pseudoMol.resNumFull(resIndex - 1)
else:
resIndexToCheck = resIndex
resNumFullToCheck = resNumFull
    #make sure that this nucleotide isn't modified
resType = pseudoMol.resTypeFromIndex(resIndexToCheck)
if resType not in STANDARD_BASES:
reportModifiedNucError(resNumFullToCheck, resType)
return False
#make sure that the molecule (or at least the end of it we're going to be building on) uses PDB3 atom naming
if pseudoMol.checkPDB2FromIndex(resIndexToCheck):
reportPDB2Error(resNumFullToCheck)
return False
#the current residue must have a glycosidic bond
baseType = pseudoMol.resTypeFromIndex(resIndexToCheck)
if baseType == "A" or baseType== "G":
glyN = "N9"
else:
#we already know that the base isn't modified because we checked above
glyN = "N1"
if pseudoMol.getAtomCoordsFromIndex("C1'", resIndexToCheck) is None:
print "Nucleotide " + resNumFullToCheck + " does not have a C1' atom. Cannot extend chain."
add_status_bar_text("Nucleotide " + resNumFullToCheck + " does not have a C1' atom. Cannot extend chain.")
return False
if pseudoMol.getAtomCoordsFromIndex(glyN, resIndexToCheck) is None:
print "Nucleotide " + resNumFullToCheck + " does not have an " + glyN + " atom. Cannot extend chain."
add_status_bar_text("Nucleotide " + resNumFullToCheck + " does not have an " + glyN + " atom. Cannot extend chain.")
return False
#make sure that this isn't an isolated nt without any phosphates
curPhosCoords = pseudoMol.getPhosCoordsFromIndex(resIndex) #only used for sanity checking
if not connectedToNext and not connectedToPrev and curPhosCoords is None:
print "Nucleotide " + resNumFull + "is an isolated nucleotides without any phosphates. Cannot extend chain."
add_status_bar_text("Nucleotide " + resNumFull + "is an isolated nucleotides without any phosphates. Cannot extend chain.")
return False
#make sure that we're not building 3'->5' on a nucleotide without a 3' phosphate (because we can't determine pseudotorsions w/o a 3' phos)
if extendDir == 5 and not connectedToNext:
print "Cannot extend nucleotide " + resNumFull + " in the 5' direction without a 3' phosphate."
print " Please add a 3' phosphate by extending the chain in the 3' direction."
add_status_bar_text("Cannot extend nucleotide " + resNumFull + " in the 5' direction without a 3' phosphate.")
return False
print "About to extend the chain in the " + str(extendDir) + "' direction"
#tell the PseudoMolecule object where to insert new residues
pseudoMol.setResInsertionPoint(resIndex)
#save the molecule state so we can restore it if we cancel the extend
pseudoMol.saveMoleculeState()
TraceGui(direction = extendDir, existingMolecule = pseudoMol, resIndexToExtend = resIndex)
ATOMS3P = frozenset(["C3'", "O3'"])
#atoms that count as a 3' selection
ATOMS5P = frozenset(["P", "OP1", "OP2", "OP3", "O5'", "C5'", "C4'"])
#atoms that count as a 5' selection
def __decideDirectionFromAtom(atomName, resNum):
"""Determine the direction to extend the chain based on the atom name (for use with isolated nuclotides)
ARGUMENTS:
atomName - the atom name
resNum - the residue number of the current nucleotide (used in reporting errors)
RETURNS:
False if the direction cannot be determined from the atom name
3 or 5 otherwise
EFFECTS:
if the direction cannot be determined from the atom name, an error will be reported to the user
(and False will be returned)
"""
if atomName in ATOMS3P:
return 3
elif atomName in ATOMS5P:
return 5
else:
print "Nucleotide " + resNum + " is an isolated nucleotide. Please select either the O5' or"
print "O3' atom to extend in the 5' or 3' direction, respectively."
add_status_bar_text("Must select either O5' or O3' of isolated nucleotides.")
return False
def storeMenuItem(menuItem, resetLabel = False):
"""ARGUMENTS:
menuItem - the gtk MenuItem to store
OPTIONAL ARGUMENTS:
resetLabel - whether we should reset the menu label to "Extend chain..."
(and also clear all pending atom picks in Coot)
This is intended for use when we're reloading RCrane
Defaults to False
RETURNS:
None
EFFECTS:
stores menuItem in the module-level variable __menuItem
"""
global __menuItem
__menuItem = menuItem
#if we're being called during an RCrane reload, then reset everything back to the appropriate starting position
global waitingForClicks
if resetLabel:
waitingForClicks = False #this is redundant with module initialization, but can't hurt to do
#in case this function is called in an unexpected way
__setMenuToExtendChain()
clear_pending_picks()
def __setMenuToExtendChain():
"""Set the menu entry labels to "Extend chain..."
ARGUMENTS:
None
RETURNS:
None
"""
if __menuItem is not None:
__menuItem.get_child().set_text("Extend chain...")
def __setMenuToCancel():
"""Set the menu entry label to "Cancel extend chain..."
ARGUMENTS:
None
RETURNS:
None
"""
if __menuItem is not None:
__menuItem.get_child().set_text("Cancel extend chain...")
def clearPendingExtendChain():
"""Cancel any pending extend chain call (intended to be called by rotamerize)
ARGUMENTS:
None
RETURNS:
True if there was a pending extend chain call
False otherwise
"""
global waitingForClicks
if waitingForClicks:
#if we're already in the middle of a extend chain call, then cancel the pending extend chain
print "Extend chain cancelled"
clear_pending_picks() #tell Coot to stop waiting for the user to click on atoms
__setMenuToExtendChain()
waitingForClicks = False
return True
else:
return False | jlec/coot | rcrane/extendChain.py | Python | gpl-3.0 | 14,026 |
import datapointbasic.searchtools
import datapointbasic.forecast
from datapointbasic.tools import ApiManager
def placesearch(api_key=None):
"""
Function placesearch creates a new instance of the datapointbasic
LocationSearch class, and returns it. The methods of the LocationSearch can
then be used to search for locations in the Met Office DataPoint.
The single input required is a valid API key for DataPoint.
The function returns a LocationSearch object.
"""
api = ApiManager(api_key)
search_obj = datapointbasic.searchtools.LocationSearch(api.api_key)
return search_obj
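# Hypothetical usage sketch; the method called on the returned LocationSearch
# object below is an assumption, not something defined in this module:
#
#     search = placesearch("aaaa-bbbb-cccc-dddd-eeee")
#     # e.g. search.some_lookup_method("Exeter") to find matching site names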
def locationforecast(place_name, api_key=None):
"""
Function locationforecast creates a new instance of the datapointbasic
FullForecast class, and returns it. You can then obtain the conditions for
each day from the day attributes.
e.g.
forecast = locationforecast("Exeter", "aaaa-bbbb-cccc-dddd-eeee")
temperatures, units = forecast.day0.temperature()
The inputs required are a valid site name (as a string) and a valid API key
for DataPoint. The function returns a FullForecast object.
"""
api = ApiManager(api_key)
search_obj = datapointbasic.forecast.FullForecast(api, place_name)
return search_obj
| PaleTomato/datapoint-basic | datapointbasic/__init__.py | Python | gpl-3.0 | 1,351 |
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from apache.aurora.client.cli.client import AuroraLogConfigurationPlugin
def test_log_plugin_enabled():
plugin = AuroraLogConfigurationPlugin()
plugin.before_dispatch(["aurora", "version", "--verbose"])
assert logging.getLogger().getEffectiveLevel() == logging.DEBUG
assert logging.getLogger('requests_kerberos').getEffectiveLevel() == logging.DEBUG
def test_log_plugin_disabled():
plugin = AuroraLogConfigurationPlugin()
plugin.before_dispatch(["aurora", "version"])
assert logging.getLogger().getEffectiveLevel() == logging.INFO
assert logging.getLogger('requests_kerberos').getEffectiveLevel() == logging.CRITICAL
| protochron/aurora | src/test/python/apache/aurora/client/cli/test_client.py | Python | apache-2.0 | 1,206 |
#coding: utf-8
"""
@Author: Well
@Date: 2014 - 05 - 04
"""
# quick_sort
# 1. Pick an element from the sequence; this element is called the "pivot".
# 2. Reorder the sequence so that all elements smaller than the pivot come before it and all
#    elements larger than the pivot come after it (equal elements may go to either side).
#    When this partitioning finishes, the pivot is in its final position. This is called the
#    partition operation.
# 3. Recursively sort the sub-sequence of elements smaller than the pivot and the sub-sequence
#    of elements larger than the pivot.
import sys
import random
lenth = 30
def qsort(arr,left,right):
key = arr[right]
lp = left
rp = right
if lp == rp: return
while True:
while (arr[lp] >= key) and (rp > lp):
lp = lp + 1
while (arr[rp] <= key) and (rp > lp):
rp = rp - 1
arr[lp], arr[rp] = arr[rp], arr[lp]
if lp >= rp : break
arr[rp], arr[right] = arr[right], arr[rp]
if left < lp:
qsort(arr,left,lp-1)
qsort(arr,rp,right)
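# Note on ordering: with the comparisons above (>= on the left scan, <= on the
# right scan), elements greater than or equal to the pivot end up on its left,
# so the array comes out sorted in descending order; swapping the two
# comparisons would give an ascending sort.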
def main():
arr = []
sys.setrecursionlimit(100000)
for i in range(lenth):
arr.append(random.randint(0,1000))
qsort(arr,0,lenth-1)
print arr
if __name__ == '__main__':
for i in range(10):
main() | neiltest/neil_learn_python | src/learn_python/python_other/neil_18_quick_sort.py | Python | mit | 1,262 |
#-*- coding: utf-8 -*-
#
# Krzysztof „krzykwas” Kwaśniewski
# Gdańsk, 18-07-2012
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from ...data.model.DataMapping import DataMapping
from ...data.model.DstServer import DstServer
from ...data.model.DstServerMapping import DstServerMapping
from ...data.model.MappedObject import MappedObject
from ...data.model.SrcServer import SrcServer
from ...data.provider.DataProviderFactory import DataProviderFactory
from ...data.sender.DataSenderFactory import DataSenderFactory
from ..Settings import Settings
import logging
import lxml.etree as etree
class Generator(object):
"""
Interactively generates a configuration file
"""
def __init__(self, settings):
"""
Real settings created on the basis of command line arguments given by the user
"""
self.__settings = settings
"""
Used to store user-input data before generating an xml file to save real settings
object from pollution
"""
self.__generatedSettings = Settings()
self.__configureLogger()
#For compatibility between Python 2.x and 3.x
try:
self.__input = raw_input
except:
self.__input = input
def generate(self):
self.__logger.info("========= Generating a configuration file for PyAgent =========")
self.__getSrcServers()
self.__getDstServers()
self.__getDataMappings()
self.__createConfigurationFile()
def __configureLogger(self):
class GeneratorFormatter(logging.Formatter):
def __init__(self):
logging.Formatter.__init__(self, "[generator] %(message)s", None)
class GeneratorHandler(logging.StreamHandler):
def __init__(self):
logging.StreamHandler.__init__(self)
self.setFormatter(GeneratorFormatter())
self.__logger = logging.getLogger(__name__)
self.__logger.addHandler(GeneratorHandler())
self.__logger.propagate = False
def __getSrcServers(self):
self.__logger.info("*** Configuring source servers")
self.__logger.info("Define a new source server...")
self.__tryAddNewSrcServer()
#Show currently defined servers
self.__logger.info("Defined source servers:")
self.__printList(self.__generatedSettings.getSrcServers())
while self.__getValue("Define a new source server? [Y/n]").lower() != "n":
self.__tryAddNewSrcServer()
#Show currently defined servers
self.__logger.info("Defined source servers:")
self.__printList(self.__generatedSettings.getSrcServers())
def __tryAddNewSrcServer(self):
#Ask for the details of a new server
name = self.__getNonEmptyValue("New server name:")
if name in self.__generatedSettings.getSrcServers():
self.__logger.info("Choose a new name, this one has been already used.")
else:
protocols = self.__getExistingSrcProtocols()
protocol = self.__getValueFromList("Protocol:", protocols)
uri, username, password = self.__getValues("Server uri:", "Username:", "Password:")
srcServer = SrcServer(name, protocol, uri, username, password)
self.__generatedSettings.getSrcServers()[srcServer.getName()] = srcServer
def __getDstServers(self):
self.__logger.info("*** Configuring destination servers")
self.__logger.info("Define a new destination server...")
self.__tryAddNewDstServer()
#Show currently defined servers
self.__logger.info("Defined destination servers:")
self.__printList(self.__generatedSettings.getDstServers())
while self.__getValue("Define a new destination server? [Y/n]").lower() != "n":
self.__tryAddNewDstServer()
#Show currently defined servers
self.__logger.info("Defined destination servers:")
self.__printList(self.__generatedSettings.getDstServers())
def __tryAddNewDstServer(self):
#Ask for the details of a new server
name = self.__getNonEmptyValue("New server name:")
if name in self.__generatedSettings.getDstServers():
self.__logger.info("Choose a new name, this one has been already used.")
else:
protocols = self.__getExistingDstProtocols()
protocol = self.__getValueFromList("Protocol:", protocols)
uri, username, password = self.__getValues("Server uri:", "Username:", "Password:")
dstServer = DstServer(name, protocol, uri, username, password)
self.__generatedSettings.getDstServers()[dstServer.getName()] = dstServer
def __getExistingDstProtocols(self):
dataSenderFactory = DataSenderFactory();
protocols = [
name[:name.find("DataSender")].lower() for name in dataSenderFactory.getDataSenderNames()
]
return protocols
def __getExistingSrcProtocols(self):
dataProviderFactory = DataProviderFactory();
protocols = [
name[:name.find("DataProvider")].lower() for name in dataProviderFactory.getDataProviderNames()
]
return protocols
def __getDataMappings(self):
self.__logger.info("*** Configuring data mappings")
while self.__getValue("Define a new data mapping? [Y/n]").lower() != "n":
self.__tryAddNewDataMapping()
def __tryAddNewDataMapping(self):
srcServers = self.__generatedSettings.getSrcServers()
srcServer = self.__getSourceServer(srcServers)
mappedObject = self.__getMappedObject()
self.__logger.info("*** Where to send the data provided by the source server?")
self.__logger.info("Define a new destination server mapping...")
dstServersMappings = []
dstServers = self.__generatedSettings.getDstServers()
while True:
dstServer = self.__getDestinationServer(dstServers)
mapTo = self.__getValue("Choose a property to which data should be bound.")
updateInterval = self.__getNonNegativeIntegerValue("How often to refresh the measurement (in seconds)?")
dstServerMapping = DstServerMapping(dstServer, mapTo, updateInterval)
dstServersMappings.append(dstServerMapping)
self.__logger.info("*** Do you want to send the same data somewhere else as well?")
if self.__getValue("Define a new destination server mapping? [Y/n]").lower() == "n":
break
dataMapping = DataMapping(srcServer, mappedObject, dstServersMappings)
self.__generatedSettings.getDataMappings().append(dataMapping)
def __getSourceServer(self, srcServers):
self.__logger.info("*** Where to take the data from?")
srcServerName = self.__getValueFromList("Source server:", srcServers)
return srcServers[srcServerName]
def __getDestinationServer(self, dstServers):
dstServerName = self.__getValueFromList("Destination server:", dstServers)
return dstServers[dstServerName]
def __getMappedObject(self):
self.__logger.info("*** How to select data from the server you have just chosen?")
namespace, name = self.__getValues("Namespace:", "Name:")
index = self.__getNonNegativeIntegerValue("Index (instance number):")
attribute = self.__getValue("Attribute:")
mappedObject = MappedObject(namespace, name, index, attribute)
return mappedObject
def __getValue(self, message):
self.__logger.info(message)
return self.__input()
def __getValues(self, *messages):
values = []
for message in messages:
values.append(self.__getValue(message))
return values
def __getValueFromList(self, message, values):
self.__logger.info(message)
while True:
self.__logger.info("Select one of: [{0}]".format(", ".join(values)))
value = self.__input()
if value in values:
return value
def __getNonEmptyValue(self, message):
while True:
value = self.__getValue(message)
if value != "":
return value
def __getNonNegativeIntegerValue(self, message):
while True:
try:
value = int(self.__getValue(message))
if value >= 0:
return value
except ValueError:
pass
self.__logger.info("Enter a non-negative integer value.")
def __printList(self, elements):
i = 1
for element in elements:
self.__logger.info("\t{0}) {1}".format(i, element))
i += 1
self.__logger.info("")
def __createConfigurationFile(self):
root = etree.Element("settings")
srcServersNode = etree.SubElement(root, "src-servers")
for srcServer in self.__generatedSettings.getSrcServers().values():
srcServerNode = etree.SubElement(srcServersNode, "src-server")
etree.SubElement(srcServerNode, "name").text = srcServer.getName()
etree.SubElement(srcServerNode, "protocol").text = srcServer.getProtocol()
etree.SubElement(srcServerNode, "uri").text = srcServer.getUri()
etree.SubElement(srcServerNode, "username").text = srcServer.getUsername()
etree.SubElement(srcServerNode, "password").text = srcServer.getPassword()
dstServersNode = etree.SubElement(root, "dst-servers")
for dstServer in self.__generatedSettings.getDstServers().values():
dstServerNode = etree.SubElement(dstServersNode, "dst-server")
etree.SubElement(dstServerNode, "name").text = dstServer.getName()
etree.SubElement(dstServerNode, "protocol").text = dstServer.getProtocol()
etree.SubElement(dstServerNode, "uri").text = dstServer.getUri()
etree.SubElement(dstServerNode, "username").text = dstServer.getUsername()
etree.SubElement(dstServerNode, "password").text = dstServer.getPassword()
dataMappingsNode = etree.SubElement(root, "data-mappings")
for dataMapping in self.__generatedSettings.getDataMappings():
dataMappingNode = etree.SubElement(dataMappingsNode, "data-mapping")
srcServerRef = etree.SubElement(dataMappingNode, "src-server")
etree.SubElement(srcServerRef, "name").text = dataMapping.getSrcServer().getName()
mappedObjectNode = etree.SubElement(dataMappingNode, "mapped-object")
etree.SubElement(mappedObjectNode, "namespace").text = dataMapping.getMappedObject().getNamespace()
etree.SubElement(mappedObjectNode, "name").text = dataMapping.getMappedObject().getName()
etree.SubElement(mappedObjectNode, "index").text = str(dataMapping.getMappedObject().getIndex())
etree.SubElement(mappedObjectNode, "attribute").text = dataMapping.getMappedObject().getAttribute()
dstServersMappingsNode = etree.SubElement(dataMappingNode, "dst-servers-mappings")
for dstServerMapping in dataMapping.getDstServersMappings():
dstServerMappingNode = etree.SubElement(dstServersMappingsNode, "dst-server-mapping")
etree.SubElement(dstServerMappingNode, "name").text = dstServerMapping.getDstServer().getName()
etree.SubElement(dstServerMappingNode, "map-to").text = dstServerMapping.getMapTo()
etree.SubElement(dstServerMappingNode, "update-interval").text = str(dstServerMapping.getUpdateInterval())
etree.SubElement(root, "callbacks")
etree.ElementTree(element=root).write(
self.__settings.getConfigurationFile(),
xml_declaration=True,
encoding="utf-8",
pretty_print=True
)
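# For reference, the generated configuration file has roughly this shape
# (element names come from the code above; the values are whatever the user
# typed during the interactive session):
#
#     <?xml version='1.0' encoding='utf-8'?>
#     <settings>
#       <src-servers>
#         <src-server>
#           <name>...</name> <protocol>...</protocol> <uri>...</uri>
#           <username>...</username> <password>...</password>
#         </src-server>
#       </src-servers>
#       <dst-servers> ... </dst-servers>
#       <data-mappings>
#         <data-mapping>
#           <src-server><name>...</name></src-server>
#           <mapped-object>
#             <namespace>...</namespace> <name>...</name>
#             <index>...</index> <attribute>...</attribute>
#           </mapped-object>
#           <dst-servers-mappings>
#             <dst-server-mapping>
#               <name>...</name> <map-to>...</map-to>
#               <update-interval>...</update-interval>
#             </dst-server-mapping>
#           </dst-servers-mappings>
#         </data-mapping>
#       </data-mappings>
#       <callbacks/>
#     </settings>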
| krzykwas/rhqagent | pyagent/settings/configurationFile/Generator.py | Python | gpl-3.0 | 11,200 |
# coding=utf-8
# Copyright 2022 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tensorflow_datasets.image_classification import cars196
import tensorflow_datasets.testing as tfds_test
class Cars196Test(tfds_test.DatasetBuilderTestCase):
DATASET_CLASS = cars196.Cars196
SPLITS = {'train': 2, 'test': 2}
DL_EXTRACT_RESULT = {
'train': 'train',
'test': 'test',
'extra': 'extra',
'test_annos': 'cars_test_annos_withlabels.mat'
}
if __name__ == '__main__':
tfds_test.test_main()
| tensorflow/datasets | tensorflow_datasets/image_classification/cars196_test.py | Python | apache-2.0 | 1,050 |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# (C) British Crown Copyright 2012-7 Met Office.
#
# This file is part of Rose, a framework for meteorological suites.
#
# Rose is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rose is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Rose. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
import pygtk
pygtk.require('2.0')
import gtk
import rose.config_editor
class ComboBoxValueWidget(gtk.HBox):
"""This is a class to add a combo box for a set of variable values.
It needs to have some allowed values set in the variable metadata.
"""
FRAC_X_ALIGN = 0.9
def __init__(self, value, metadata, set_value, hook, arg_str=None):
super(ComboBoxValueWidget, self).__init__(homogeneous=False,
spacing=0)
self.value = value
self.metadata = metadata
self.set_value = set_value
self.hook = hook
comboboxentry = gtk.ComboBox()
liststore = gtk.ListStore(str)
cell = gtk.CellRendererText()
cell.xalign = self.FRAC_X_ALIGN
comboboxentry.pack_start(cell)
comboboxentry.add_attribute(cell, 'text', 0)
var_values = self.metadata[rose.META_PROP_VALUES]
var_titles = self.metadata.get(rose.META_PROP_VALUE_TITLES)
for k, entry in enumerate(var_values):
if var_titles is not None and var_titles[k]:
liststore.append([var_titles[k] + " (" + entry + ")"])
else:
liststore.append([entry])
comboboxentry.set_model(liststore)
if self.value in var_values:
index = self.metadata['values'].index(self.value)
comboboxentry.set_active(index)
comboboxentry.connect('changed', self.setter)
comboboxentry.connect('button-press-event',
lambda b: comboboxentry.grab_focus())
comboboxentry.show()
self.pack_start(comboboxentry, False, False, 0)
self.grab_focus = lambda: self.hook.get_focus(comboboxentry)
self.set_contains_error = (lambda e:
comboboxentry.modify_bg(gtk.STATE_NORMAL,
self.bad_colour))
def setter(self, widget):
index = widget.get_active()
self.value = self.metadata[rose.META_PROP_VALUES][index]
self.set_value(self.value)
return False
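# --- Illustrative usage sketch (not part of Rose itself) --------------------
# The class docstring above notes that the widget needs allowed values in the
# variable metadata. Assuming a metadata dict keyed by the rose constants used
# in __init__ (the values and titles below are made up for illustration):
#
#     metadata = {rose.META_PROP_VALUES: ["red", "green", "blue"],
#                 rose.META_PROP_VALUE_TITLES: ["Red", "Green", "Blue"]}
#     widget = ComboBoxValueWidget("red", metadata, set_value, hook)
#
# Here set_value is a callable taking the chosen value and hook is an object
# providing get_focus(), matching the constructor signature above.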
| scwhitehouse/rose | lib/python/rose/config_editor/valuewidget/combobox.py | Python | gpl-3.0 | 3,038 |
import os
import uuid
DEBUG = False
TESTING = False
SECRET_KEY = str(uuid.uuid4())
USERNAME = str(uuid.uuid4())
PASSWORD = str(uuid.uuid4())
NAME = 'datapusher'
# Webserver host and port
HOST = os.environ.get('DATAPUSHER_HOST', '0.0.0.0')
PORT = os.environ.get('DATAPUSHER_PORT', 8800)
# Database
SQLALCHEMY_DATABASE_URI = os.environ.get('DATAPUSHER_SQLALCHEMY_DATABASE_URI', 'sqlite:////tmp/job_store.db')
# Download and streaming settings
MAX_CONTENT_LENGTH = int(os.environ.get('DATAPUSHER_MAX_CONTENT_LENGTH', '1024000'))
CHUNK_SIZE = int(os.environ.get('DATAPUSHER_CHUNK_SIZE', '16384'))
CHUNK_INSERT_ROWS = int(os.environ.get('DATAPUSHER_CHUNK_INSERT_ROWS', '250'))
DOWNLOAD_TIMEOUT = int(os.environ.get('DATAPUSHER_DOWNLOAD_TIMEOUT', '30'))
# Verify SSL
SSL_VERIFY = os.environ.get('DATAPUSHER_SSL_VERIFY', True)
# logging
#LOG_FILE = '/tmp/ckan_service.log'
| ckan/datapusher | deployment/datapusher_settings.py | Python | agpl-3.0 | 876 |
import numpy as np
from track_utils.cell_container import _distance_diff, _totalintensity_difference
from track_utils.cell_calculation import calc_cell_distance, calc_cell_massdiff
try:
from covertrack.cell import CellListMaker
from covertrack.utils.imreg import translation
from covertrack.utils.seg_utils import skilabel, watershed
from covertrack.segmentation.segment_utils.filters import sizefilterandopen
from covertrack.utils.seg_utils import calc_neck_score_thres_filtered, labels2outlines, cut_neck
except:
from cell import CellListMaker
from utils.imreg import translation
from utils.seg_utils import skilabel, watershed
from segmentation.segment_utils.filters import sizefilterandopen
from utils.seg_utils import calc_neck_score_thres_filtered, labels2outlines, cut_neck
from track_utils.cell_calculation import find_one_to_one_assign, call_lap, convert_coords_to_linear
from track_utils.cell_calculation import find_cells_overlap_linear_coords, flatlist, find_coords_overlap_coords
from track_utils.cell_calculation import pick_closer_binarycostmat
from skimage.segmentation import clear_border
from skimage.measure import regionprops
from logging import getLogger
from track_utils.mi_align import MutualInfoAlignerMultiHypothesis
'''
To track cells, simply set "cell.next = cell_in_curr_frame"
'''
logger = getLogger('covertrack.tracking')
def cell_nearest_neighbor(img, label, container, holder, DISPLACEMENT=100, MASSTHRES=0.2):
"""lazier implementation but may be easier and cleaner to write.
A simple example for tracking.
Args:
DISPLACEMENT (int): The maximum distance (in pixel)
MASSTHRES (float): The maximum difference of total intensity changes.
0.2 means it allows for 20% total intensity changes.
"""
untr_prev, untr_curr = container.unlinked
for cell in untr_prev:
for cand in untr_curr:
within_dist = calc_cell_distance(cell, cand) <= DISPLACEMENT
within_mass = abs(calc_cell_massdiff(cell, cand)) < MASSTHRES
if within_dist and within_mass:
cell.next = cand
return container
def nearest_neighbor(img, label, container, holder, DISPLACEMENT=100, MASSTHRES=0.2):
"""Link two cells if they are the only two cells within DISPLACEMENT and MASSTHRES.
Args:
DISPLACEMENT (int): The maximum distance (in pixel)
MASSTHRES (float): The maximum difference of total intensity changes.
0.2 means it allows for 20% total intensity changes.
"""
DISPLACEMENT = DISPLACEMENT
MASSTHRES = MASSTHRES
withinarea = container._dist_unlinked < DISPLACEMENT
withinmass = abs(container._masschange_unlinked) < MASSTHRES
binarymat = withinarea * withinmass
binarycost = binarymat
good_curr_idx, good_prev_idx = find_one_to_one_assign(binarycost)
prev_cells, curr_cells = container.unlinked
for ci, pi in zip(good_curr_idx, good_prev_idx):
prev_cells[pi].next = curr_cells[ci]
return container
def run_lap(img, label, container, holder, DISPLACEMENT=100, MASSTHRES=0.2):
'''Linear assignment problem for mammalian cells.
Cost matrix is simply the distance.
    costDie and costBorn are variables that change from frame to frame. Update them through holder.
Args:
DISPLACEMENT (int): The maximum distance (in pixel)
MASSTHRES (float): The maximum difference of total intensity changes.
0.2 means it allows for 20% total intensity changes.
'''
dist = container._dist_unlinked
massdiff = container._masschange_unlinked
    '''The search radius is currently set simply by the maximum possible displacement.
    In the future, a data-driven approach mentioned in the LAP paper (suppl. pg. 7) may be added.'''
    dist[dist > DISPLACEMENT] = np.Inf  # assign a large cost to an unlikely pair
# dist[abs(massdiff) > MASSTHRES] = np.Inf
cost = dist
if cost.shape[0] == 0 or cost.shape[1] == 0:
return container
# Define initial costBorn and costDie in the first frame
    if not hasattr(holder, 'cost_born') or not hasattr(holder, 'cost_die'):
holder.cost_born = np.percentile(cost[~np.isinf(cost)], 80)
holder.cost_die = np.percentile(cost[~np.isinf(cost)], 80)
# try:
binary_cost = call_lap(cost, holder.cost_die, holder.cost_born)
# The first assignment of np.Inf is to reduce calculation of linear assignment.
# This part will make sure that cells outside of these range do not get connected.
binary_cost[abs(massdiff) > MASSTHRES] = False
binary_cost[dist > DISPLACEMENT] = False
gp, gc = np.where(binary_cost)
good_prev_idx, good_curr_idx = list(gp), list(gc)
prev_cells, curr_cells = container.unlinked
for ci, pi in zip(good_curr_idx, good_prev_idx):
prev_cells[pi].next = curr_cells[ci]
# update cost
dist = container._dist_unlinked
if dist.size:
cost = np.max(dist)*1.05
if cost != 0: # solver freezes if cost is 0
holder.cost_born, holder.cost_die = cost, cost
return container
def watershed_distance(img, label, container, holder, ERODI=5,
DEBRISAREA=50, DISPLACEMENT=50, MASSTHRES=0.2):
'''
    Adaptive segmentation using tracking information.
    Watershed the existing label, i.e. make a cut at the deflection.
After the cuts, objects will be linked if they are within DISPLACEMENT and MASSTHRES.
If two candidates are found, it will pick a closer one.
Args:
ERODI (int): Erosion size element for generating watershed seeds.
Smaller ERODI will allow more cuts.
DISPLACEMENT (int): The maximum distance (in pixel)
MASSTHRES (float): The maximum difference of total intensity changes.
0.2 means it allows for 20% total intensity changes.
'''
untr_prev, untr_curr = container.unlinked
mask_untracked = container._label_untracked.astype(bool)
wshed_label = watershed(mask_untracked, ERODI)
wshed_label = skilabel(sizefilterandopen(wshed_label, DEBRISAREA, np.Inf, 0))
newcells = CellListMaker(img, wshed_label, holder, holder.frame).make_list()
distanceUntracked = _distance_diff(untr_prev, newcells)
masschangeUntracked = _totalintensity_difference(untr_prev, newcells)
withinarea = distanceUntracked < DISPLACEMENT
withinmass = abs(masschangeUntracked) < MASSTHRES
withinareaMass = withinarea * withinmass
withinareaMass = pick_closer_binarycostmat(withinareaMass, distanceUntracked)
good_curr_idx, good_prev_idx = find_one_to_one_assign(withinareaMass)
# update the link
for ci, pi in zip(good_curr_idx, good_prev_idx):
untr_prev[pi].next = newcells[ci]
# Get all linear coordinates from good newly segmented cells
good_curr_coords = [newcells[n].prop.coords for n in good_curr_idx]
lin_curr_coords = [convert_coords_to_linear(i, holder.img_shape) for i in flatlist(good_curr_coords)]
# find cells in old mask (in current) that overlaps with good new cells
old_cells_to_remove, lin_old_coords_remove = find_coords_overlap_coords(untr_curr, lin_curr_coords, holder.img_shape)
# find cells in new mask which overlaps with the cells in old mask
newcells_to_update = find_cells_overlap_linear_coords(newcells, lin_old_coords_remove, holder.img_shape)
# remove old cells
for old_cell in old_cells_to_remove:
container.curr_cells.remove(old_cell)
# add new cells
container.curr_cells.extend(newcells_to_update)
return container
def jitter_correction_label(img, label, container, holder):
    '''Simple jitter correction based on markers.
    It will not work if you have too few objects. This adds jitters to corr_x and corr_y.
    Jitter values are relative to the first frame, so jitters from consecutive
    frames accumulate in holder.jitter.
    Add this as the first algorithm in track_args when used.
'''
if not hasattr(holder, 'prev_label'):
holder.prev_label = label
if np.any(holder.prev_label):
prevlabel = holder.prev_label
ocCurr = label
ocPrev = prevlabel
jitter = translation(ocPrev, ocCurr)
if not hasattr(holder, 'jitter'):
holder.jitter = [0, 0]
for i in (0, 1):
holder.jitter[i] = holder.jitter[i] + jitter[i]
for cell in container.curr_cells:
cell.prop.jitter_x = holder.jitter[1]
cell.prop.jitter_y = holder.jitter[0]
cell.prop.corr_x = cell.prop.corr_x + holder.jitter[1]
cell.prop.corr_y = cell.prop.corr_y + holder.jitter[0]
logger.debug("\t\tA field moved {0} pix to x and {1} pix to y".format(*holder.jitter))
return container
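# Illustrative sketch (not from the original project): the docstring above says
# to put jitter correction first in the chain of tracking algorithms. Assuming
# the chain is expressed simply as a list of functions applied in order (the
# real covertrack configuration format may differ), it could look like:
#     track_args = [jitter_correction_label, nearest_neighbor, run_lap]
#     for func in track_args:
#         container = func(img, label, container, holder)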
def jitter_correction_label_at_frame(img, label, container, holder, FRAME=1):
"""
FRAME (List(int) or int): a list of frames to run jitter correction
"""
if isinstance(FRAME, int):
FRAME = [FRAME, ]
if not hasattr(holder, 'prev_label'):
holder.prev_label = label
if holder.frame in FRAME:
container = jitter_correction_label(img, label, container, holder)
logger.debug("\t\tA field moved {0} pix to x and {1} pix to y".format(*holder.jitter))
return container
def jitter_correction_mutualinfo(img, label, container, holder):
# FIXME
if not hasattr(holder, 'prev_img'):
holder.prev_img = img
if np.any(holder.prev_img):
prev_img = holder.prev_img
mialmh = MutualInfoAlignerMultiHypothesis(prev_img, img)
mialmh.execute()
# jitter = (mialmh._j, mialmh._i)
jitter = (mialmh._i, mialmh._j)
if not hasattr(holder, 'jitter'):
holder.jitter = [0, 0]
for i in (0, 1):
holder.jitter[i] = holder.jitter[i] + jitter[i]
for cell in container.curr_cells:
cell.prop.jitter_x = holder.jitter[1]
cell.prop.jitter_y = holder.jitter[0]
cell.prop.corr_x = cell.prop.corr_x + holder.jitter[1]
cell.prop.corr_y = cell.prop.corr_y + holder.jitter[0]
logger.debug("\t\tA field moved {0} pix to x and {1} pix to y".format(*holder.jitter))
return container
def track_neck_cut(img, label, container, holder, ERODI=5, DEBRISAREA=50, DISPLACEMENT=50,
MASSTHRES=0.2, LIM=10, EDGELEN=5, THRES_ANGLE=180, STEPLIM=10):
"""
    Adaptive segmentation using tracking information.
    Separate two objects by making a cut at the deflection. For each point on the outline,
    it makes a triangle whose edges are separated by EDGELEN and calculates the angle facing the inside of the concave region.
    EDGELEN (int): Length of the edges of the triangle on the nuclear perimeter.
    THRES_ANGLE (int): A point is defined as a neck point if its triangle has more than this angle.
    STEPLIM (int): Neck points need to be separated by at least STEPLIM along the perimeter.
"""
untr_prev, untr_curr = container.unlinked
label_untracked = container._label_untracked
unique_labels = np.unique(label_untracked)
unique_labels = unique_labels[unique_labels > 0]
newcells = []
all_new_cells = []
for label_id in unique_labels:
mask = label_untracked == label_id
cl_label = clear_border(mask)
outlines = labels2outlines(cl_label).astype(np.uint16)
rps = regionprops(outlines)
rps = [i for i in rps if i.perimeter > STEPLIM]
for cell in rps:
score, coords = calc_neck_score_thres_filtered(cell.coords, edgelen=EDGELEN, thres=THRES_ANGLE, steplim=STEPLIM)
if len(score) > 1:
r0, c0 = coords[0, :]
if coords.shape[0] > LIM:
coords = coords[:LIM, :]
for cand in coords[1:, :]:
untr_prev = container.unlinked[0]
cut_label = skilabel(cut_neck(cl_label, r0, c0, cand[0], cand[1]), conn=1)
new_cells_temp = CellListMaker(img, cut_label, holder, holder.frame).make_list()
if len(new_cells_temp) > 1:
distanceUntracked = _distance_diff(untr_prev, new_cells_temp)
masschangeUntracked = _totalintensity_difference(untr_prev, new_cells_temp)
withinarea = distanceUntracked < DISPLACEMENT
withinmass = abs(masschangeUntracked) < MASSTHRES
withinareaMass = withinarea * withinmass
withinareaMass = pick_closer_binarycostmat(withinareaMass, distanceUntracked)
good_curr_idx, good_prev_idx = find_one_to_one_assign(withinareaMass)
if len(good_curr_idx) > 0:
# update the link
all_new_cells.append(new_cells_temp)
for ci, pi in zip(good_curr_idx, good_prev_idx):
newcells.append(new_cells_temp[ci])
untr_prev[pi].next = new_cells_temp[ci]
break
good_curr_coords = [n.prop.coords for n in newcells]
lin_curr_coords = [convert_coords_to_linear(i, holder.img_shape) for i in flatlist(good_curr_coords)]
# find cells in old mask (in current) that overlaps with good new cells
old_cells_to_remove, lin_old_coords_remove = find_coords_overlap_coords(untr_curr, lin_curr_coords, holder.img_shape)
# find cells in new mask which overlaps with the cells in old mask
all_new_cells = [i for j in all_new_cells for i in j]
newcells_to_update = find_cells_overlap_linear_coords(all_new_cells, lin_old_coords_remove, holder.img_shape)
# remove old cells
for old_cell in old_cells_to_remove:
container.curr_cells.remove(old_cell)
# add new cells
container.curr_cells.extend(newcells_to_update)
return container
def back_track(img, label, container, holder, BACKFRAME=None):
"""
    Implement tracking from the BACKFRAME frame back to the beginning and then from the beginning to the end.
    Running this will find a better segmentation in the first frame if you combine it
    with adaptive segmentation such as track_neck_cut or watershed_distance.
    This modifies self.pathset in call_tracking and dynamically changes the behavior of the loop.
If you have 4 frames, the overall loop changes from [0, 1, 2, 3] to [0, 1, 3, 2, 1, 0, 1, 2, 3].
Args:
        BACKFRAME (int): decides at which frame to start the back-tracking.
"""
ind = holder.pathset.index(holder.imgpath)
if not hasattr(holder, 'back_flag'):
holder.back_flag = True
if holder.back_flag:
for i in holder.pathset:
if holder.back_flag:
for ii in holder.pathset[:BACKFRAME]:
holder.pathset.insert(ind, ii)
holder.back_flag = False
holder.pathset.insert(ind, holder.pathset[-1])
return container | braysia/covertrack | covertrack/tracking/tracking_operations.py | Python | mit | 15,292 |
"""
Tests for dit.multivariate.secret_key_agreement.skar_lower_bounds
"""
import pytest
from dit.example_dists.intrinsic import *
from dit.multivariate import necessary_intrinsic_mutual_information
@pytest.mark.flaky(reruns=5)
@pytest.mark.parametrize('dist', [intrinsic_1, intrinsic_2, intrinsic_3])
def test_nimi_1(dist):
"""
Test against known values.
"""
nimi = necessary_intrinsic_mutual_information(dist, [[0], [1]], [2], bound_u=2, bound_v=4)
assert nimi == pytest.approx(dist.secret_rate, abs=1e-5)
| dit/dit | tests/multivariate/secret_key_agreement/test_necessary_intrinsic_mutual_information.py | Python | bsd-3-clause | 531 |
# -*- coding: utf-8 -*-
"""Generic Unit tests for the GDAL provider.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '2018-30-10'
__copyright__ = 'Copyright 2018, Nyall Dawson'
import os
from qgis.core import (
QgsProviderRegistry,
QgsDataProvider,
QgsRasterLayer,
QgsRectangle,
)
from qgis.testing import start_app, unittest
from qgis.PyQt.QtGui import qRed
from utilities import unitTestDataPath
start_app()
TEST_DATA_DIR = unitTestDataPath()
class PyQgsGdalProvider(unittest.TestCase):
def checkBlockContents(self, block, expected):
res = []
for r in range(block.height()):
res.extend([block.value(r, c) for c in range(block.width())])
self.assertEqual(res, expected)
def testCapabilities(self):
self.assertTrue(QgsProviderRegistry.instance().providerCapabilities("gdal") & QgsDataProvider.File)
self.assertTrue(QgsProviderRegistry.instance().providerCapabilities("gdal") & QgsDataProvider.Dir)
self.assertTrue(QgsProviderRegistry.instance().providerCapabilities("gdal") & QgsDataProvider.Net)
def testRasterBlock(self):
"""Test raster block with extent"""
path = os.path.join(unitTestDataPath(), 'landsat_4326.tif')
raster_layer = QgsRasterLayer(path, 'test')
self.assertTrue(raster_layer.isValid())
extent = QgsRectangle(17.94284482577178252, 30.23021770271909503, 17.94407867909909626, 30.23154272264058307)
block = raster_layer.dataProvider().block(1, extent, 2, 3)
self.checkBlockContents(block, [
125.0, 125.0,
125.0, 125.0,
125.0, 124.0,
])
full_content = [
125.0, 125.0, 125.0,
125.0, 125.0, 125.0,
125.0, 124.0, 125.0,
126.0, 127.0, 127.0,
]
extent = raster_layer.extent()
block = raster_layer.dataProvider().block(1, extent, 3, 4)
self.checkBlockContents(block, full_content)
extent = raster_layer.extent()
extent.grow(-0.0001)
block = raster_layer.dataProvider().block(1, extent, 3, 4)
self.checkBlockContents(block, full_content)
row_height = raster_layer.extent().height() / raster_layer.height()
for row in range(raster_layer.height()):
extent = raster_layer.extent()
extent.setYMaximum(extent.yMaximum() - row_height * row)
extent.setYMinimum(extent.yMaximum() - row_height)
block = raster_layer.dataProvider().block(1, extent, 3, 1)
self.checkBlockContents(block, full_content[row * 3:row * 3 + 3])
def testDecodeEncodeUriGpkg(self):
"""Test decodeUri/encodeUri geopackage support"""
uri = '/my/raster.gpkg'
parts = QgsProviderRegistry.instance().decodeUri('gdal', uri)
self.assertEqual(parts, {'path': '/my/raster.gpkg', 'layerName': None})
encodedUri = QgsProviderRegistry.instance().encodeUri('gdal', parts)
self.assertEqual(encodedUri, uri)
uri = 'GPKG:/my/raster.gpkg'
parts = QgsProviderRegistry.instance().decodeUri('gdal', uri)
self.assertEqual(parts, {'path': '/my/raster.gpkg', 'layerName': None})
encodedUri = QgsProviderRegistry.instance().encodeUri('gdal', parts)
self.assertEqual(encodedUri, '/my/raster.gpkg')
uri = 'GPKG:/my/raster.gpkg:mylayer'
parts = QgsProviderRegistry.instance().decodeUri('gdal', uri)
self.assertEqual(parts, {'path': '/my/raster.gpkg', 'layerName': 'mylayer'})
encodedUri = QgsProviderRegistry.instance().encodeUri('gdal', parts)
self.assertEqual(encodedUri, uri)
def testDecodeEncodeUriOptions(self):
"""Test decodeUri/encodeUri options support"""
uri = '/my/raster.pdf|option:DPI=300|option:GIVEME=TWO'
parts = QgsProviderRegistry.instance().decodeUri('gdal', uri)
self.assertEqual(parts, {'path': '/my/raster.pdf', 'layerName': None, 'openOptions': ['DPI=300', 'GIVEME=TWO']})
encodedUri = QgsProviderRegistry.instance().encodeUri('gdal', parts)
self.assertEqual(encodedUri, uri)
if __name__ == '__main__':
unittest.main()
| lbartoletti/QGIS | tests/src/python/test_provider_gdal.py | Python | gpl-2.0 | 4,428 |
import logging
from logging import FileHandler, Formatter
from flask import Flask, request
from flask_bootstrap import Bootstrap
from flask_moment import Moment
from flask_wtf.csrf import CSRFProtect
from flask_mail import Mail
from flask_login import LoginManager
from flask_babel import Babel
from buttersalt_saltapi import saltapi
from config import config
bootstrap = Bootstrap()
moment = Moment()
csrfprotect = CSRFProtect()
mail = Mail()
salt = saltapi.SaltApi()
babel = Babel()
login_manager = LoginManager()
login_manager.login_view = "user.login"
login_manager.session_protection = 'basic'
file_handler = FileHandler('ButterSalt.log')
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(Formatter(
'%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]'
))
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)
@babel.localeselector
def get_locale():
locale = request.accept_languages.best_match(['en', 'zh-cn', 'zh-tw'])
    if locale in ('zh-cn', 'zh-tw'):
return 'zh'
else:
return 'en'
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
bootstrap.init_app(app)
mail.init_app(app)
moment.init_app(app)
salt.init_app(app)
login_manager.init_app(app)
csrfprotect.init_app(app)
babel.init_app(app)
app.logger.addHandler(file_handler)
app.logger.addHandler(stream_handler)
app.logger.setLevel(logging.INFO)
from .views.run import run
from .views.dashboard import dashboard
from .views.saltstack import saltstack
from .views.user import user
from .views.error import error
app.register_blueprint(run)
app.register_blueprint(dashboard)
app.register_blueprint(saltstack)
app.register_blueprint(user)
app.register_blueprint(error)
return app
| lfzyx/ButterSalt | buttersalt/__init__.py | Python | mit | 1,915 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
i18n_dict = {
'ko': {
'youmustlogin': '로그인을 해야 합니다',
'invalidemail': '잘못된 이메일 주소입니다.',
'invalidusername': '잘못된 사용자명입니다. 사용자명은 최소 4글자여야 합니다.',
'invalidpassword': '잘못된 비밀번호입니다. 비밀번호는 최소 8글자여야 합니다.',
'passwordmismatch': '두 비밀번호가 일치하지 않습니다.',
'alreadyexistemail': '이미 존재하는 이메일입니다.',
'alreadyexistname': '이미 존재하는 사용자 이름입니다.',
'loginfailed': '잘못된 이메일, 계정명 또는 비밀번호입니다.',
'loginfailed-oauthuser': '외부 사이트 연동을 통해 가입된 사용자입니다. 가입한 서비스를 이용해 로그인을 해 주십시오.',
'wrongimage': '파일이 첨부되지 않았습니다.',
'notimage': '파일이 이미지가 아니거나 업로드가 가능한 이미지 종류(JPG, PNG, GIF, SVG)가 아닙니다.',
'accupdatesuccess': '계정 정보가 갱신되었습니다.',
'uploadsuccess': '업로드가 완료되었습니다.',
'signupsuccess': '가입이 완료되었습니다.',
'deletesuccess': '이미지가 삭제되었습니다.',
'invalidexpiretime': '잘못된 만료 시간입니다.',
'invalidexpiretime-toolong': '만료 시간은 1년을 넘을 수 없습니다.',
'nopassword': '비밀번호를 설정해야 합니다.',
'oauth-connected': '외부 계정이 ImgTL 계정에 연결되었습니다.',
'oauth-disconnected': '외부 계정 연결이 해제되었습니다.'
}
}
def i18n(key, lang='ko'):
try:
return i18n_dict[lang][key].decode('utf-8')
except KeyError:
return key
| imgtl/imgtl | imgtl/i18n.py | Python | mit | 1,878 |
from setuptools import setup, find_packages
from swift_setup import __version__ as version
from shutil import copytree, move, rmtree
import os
name = "swift-setup"
install_requires = []
try:
import fabric
except ImportError:
install_requires.append("fabric")
data_files = [('/etc/swift-setup', ['etc/swift-setup.conf-sample']),
('/etc/swift-setup/hosts', ['etc/hosts/generic',
'etc/hosts/admin',
'etc/hosts/proxy',
'etc/hosts/storage'])]
setup(
name = name,
version = version,
author = "Joao Marcelo Martins",
author_email = "btorch@gmail.com",
description = "Help scripts to setup a swift cluster",
license = "Apache License, (2.0)",
keywords = "openstack swift",
url = "https://github.com/btorch/swift-setup",
packages=find_packages(exclude=['bin']),
classifiers=[
'Development Status :: 1 - Beta',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Environment :: Console',
'Intended Audience :: System Administrators',
],
install_requires=install_requires,
scripts=['bin/swift-setup',],
data_files = data_files)
src = 'templates'
dst = '/etc/swift-setup/templates'
if os.path.exists(dst):
new_dst = dst + '.old'
if os.path.exists(new_dst):
rmtree(new_dst)
move(dst, new_dst)
if os.path.exists('/etc/swift-setup'):
copytree(src, dst)
| btorch/swift-setup | setup.py | Python | apache-2.0 | 1,605 |
from gettext import gettext as _
from pulp.client.commands.repo import cudl, sync_publish, status
from pulp.client.extensions.decorator import priority
from pulp_openstack.common import constants
from pulp_openstack.extensions.admin.cudl import CreateOpenstackRepositoryCommand
from pulp_openstack.extensions.admin.images import ImageCopyCommand
from pulp_openstack.extensions.admin.images import ImageRemoveCommand
from pulp_openstack.extensions.admin.upload import UploadOpenstackImageCommand
from pulp_openstack.extensions.admin.repo_list import ListOpenstackRepositoriesCommand
SECTION_ROOT = 'openstack'
DESC_ROOT = _('manage Openstack images')
SECTION_REPO = 'repo'
DESC_REPO = _('repository lifecycle commands')
SECTION_UPLOADS = 'uploads'
DESC_UPLOADS = _('upload openstack images into a repository')
SECTION_PUBLISH = 'publish'
DESC_PUBLISH = _('publish a openstack repository')
SECTION_PUBLISH_HTTP = 'http'
DESC_PUBLISH_HTTP = _('publish a openstack repository via http')
SECTION_PUBLISH_GLANCE = 'glance'
DESC_PUBLISH_GLANCE = _('publish a openstack repository via glance')
@priority()
def initialize(context):
"""
create the openstack CLI section and add it to the root
:param context: CLI context
:type context: pulp.client.extensions.core.ClientContext
"""
root_section = context.cli.create_section(SECTION_ROOT, DESC_ROOT)
repo_section = add_repo_section(context, root_section)
add_upload_section(context, repo_section)
add_publish_section(context, repo_section)
def add_upload_section(context, parent_section):
"""
add an upload section to the openstack section
:param context: pulp context
:type context: pulp.client.extensions.core.ClientContext
:param parent_section: section of the CLI to which the upload section
should be added
:type parent_section: pulp.client.extensions.extensions.PulpCliSection
:return: populated section
:rtype: PulpCliSection
"""
upload_section = parent_section.create_subsection(SECTION_UPLOADS, DESC_UPLOADS)
upload_section.add_command(UploadOpenstackImageCommand(context))
return upload_section
def add_repo_section(context, parent_section):
"""
add a repo section to the openstack section
:param context: pulp context
:type context: pulp.client.extensions.core.ClientContext
:param parent_section: section of the CLI to which the repo section
should be added
:type parent_section: pulp.client.extensions.extensions.PulpCliSection
:return: populated section
:rtype: PulpCliSection
"""
repo_section = parent_section.create_subsection(SECTION_REPO, DESC_REPO)
repo_section.add_command(CreateOpenstackRepositoryCommand(context))
repo_section.add_command(cudl.DeleteRepositoryCommand(context))
repo_section.add_command(ImageRemoveCommand(context))
repo_section.add_command(ImageCopyCommand(context))
repo_section.add_command(ListOpenstackRepositoriesCommand(context))
return repo_section
def add_publish_section(context, parent_section):
"""
add a publish section with subsections to the repo section
This adds both an "http" and "glance" publish command.
:param context: pulp context
:type context: pulp.client.extensions.core.ClientContext
:param parent_section: section of the CLI to which the repo section should be added
:type parent_section: pulp.client.extensions.extensions.PulpCliSection
:return: populated section
:rtype: PulpCliSection
"""
publish_section = parent_section.create_subsection(SECTION_PUBLISH, DESC_PUBLISH)
http_section = publish_section.create_subsection(SECTION_PUBLISH_HTTP, DESC_PUBLISH_HTTP)
glance_section = publish_section.create_subsection(SECTION_PUBLISH_GLANCE, DESC_PUBLISH_GLANCE)
renderer = status.PublishStepStatusRenderer(context)
http_section.add_command(
sync_publish.RunPublishRepositoryCommand(context,
renderer,
constants.CLI_WEB_DISTRIBUTOR_ID))
http_section.add_command(sync_publish.PublishStatusCommand(context, renderer))
glance_publish_command = sync_publish.\
RunPublishRepositoryCommand(context, renderer, constants.CLI_GLANCE_DISTRIBUTOR_ID)
glance_section.add_command(glance_publish_command)
glance_section.add_command(sync_publish.PublishStatusCommand(context, renderer))
return publish_section
| pulp/pulp_openstack | extensions_admin/pulp_openstack/extensions/admin/pulp_cli.py | Python | gpl-2.0 | 4,537 |
import gevent
import random
import string
from powerpool.clients import StratumClients
import logging
logging.getLogger().addHandler(logging.StreamHandler())
SEED_CLIENTS = 1000
client_id = 0
def rand_str(N):
return ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(N))
class DummyClient(object):
def __init__(self, address, worker, id):
self.address = address
self.worker = worker
self.id = id
class DummyReporter(object):
def add_one_minute(self, address, acc, stamp, worker, dup, low, stale):
print "add one minute"
def add_shares(self, address, shares):
print "add shares"
def agent_send(self, address, worker, typ, data, time):
print "agent_send"
def transmit_block(self, address, worker, height, total_subsidy, fees,
hex_bits, hash, merged):
pass
class DummyServer(object):
reporter = DummyReporter()
config = dict(share_batch_interval=10)
server = DummyServer()
clients = StratumClients(server)
clients.start()
def client_sim():
global client_id
print "client {} starting".format(client_id)
if clients.address_lut.keys() and random.randint(1, 3) == 1:
address = random.choice(clients.address_lut.keys())
print "picking address from already connected users"
else:
address = rand_str(34)
worker = rand_str(10)
client = DummyClient(address, worker, client_id)
clients[client_id] = client
clients.set_user(client)
client_id += 1
try:
while True:
            if 1 == random.randint(1, 100):  # disconnect the sim client
break
if 1 == random.randint(1, 5): # submit a share
clients.add_share(address, worker, 100, 1)
gevent.sleep(random.uniform(0, 0.3))
#print "iter on client {}".format(client.id)
finally:
del clients[client.id]
print "client {} closing".format(client.id)
def client_maker():
for i in xrange(SEED_CLIENTS):
gevent.spawn(client_sim)
while True:
gevent.sleep(random.uniform(0.2, 2))
client_sim()
gevent.joinall([gevent.spawn(client_maker)])
| simplecrypto/powerpool | bench/share_fuzzer.py | Python | bsd-2-clause | 2,215 |
"""
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
"""
from pyjamas import DOM
from pyjamas.ui.Image import Image
"""*
* Static internal collection of ImageLoader instances.
* ImageLoader is not instantiable externally.
"""
imageLoaders = []
"""*
* Provides a mechanism for deferred execution of a callback
* method once all specified Images are loaded.
"""
class ImageLoader:
def __init__(self):
self.images = []
self.callBack = None
self.loadedImages = 0
self.totalImages = 0
"""*
* Stores the ImageElement reference so that when all the images report
* an onload, we can return the array of all the ImageElements.
* @param img
"""
def addHandle(self, img):
self.totalImages += 1
self.images.append(img)
"""*
* Invokes the onImagesLoaded method in the CallBack if all the
* images are loaded AND we have a CallBack specified.
*
* Called from the JSNI onload event handler.
"""
def dispatchIfComplete(self):
if self.callBack is not None and self.isAllLoaded():
self.callBack.onImagesLoaded(self.images)
# remove the image loader
imageLoaders.remove(self)
"""*
* Sets the callback object for the ImageLoader.
* Once this is set, we may invoke the callback once all images that
* need to be loaded report in from their onload event handlers.
*
* @param cb
"""
def finalize(self, cb):
self.callBack = cb
def incrementLoadedImages(self):
self.loadedImages += 1
def isAllLoaded(self):
return (self.loadedImages == self.totalImages)
"""*
* Returns a handle to an img object. Ties back to the ImageLoader instance
"""
def prepareImage(self, url):
img = Image()
img.__isLoaded = False
img.addLoadListener(self)
# normally, event listeners are only set up when the widget
# is attached to part of the DOM (see Widget.onAttach). but,
        # in this case, we want a load event _even though_ the Image
# widget is not yet attached (and quite likely won't be).
DOM.setEventListener(img.getElement(), img)
return img
def onLoad(self, img):
if not img.__isLoaded:
# __isLoaded should be set for the first time here.
# if for some reason img fires a second onload event
# we do not want to execute the following again (hence the guard)
img.__isLoaded = True;
self.incrementLoadedImages();
img.removeLoadListener(self)
# we call this function each time onload fires
# It will see if we are ready to invoke the callback
self.dispatchIfComplete();
return img;
"""*
* Takes in an array of url Strings corresponding to the images needed to
* be loaded. The onImagesLoaded() method in the specified CallBack
* object is invoked with an array of ImageElements corresponding to
* the original input array of url Strings once all the images report
* an onload event.
*
* @param urls Array of urls for the images that need to be loaded
* @param cb CallBack object
"""
def loadImages(urls, cb):
il = ImageLoader()
for i in range(len(urls)):
il.addHandle(il.prepareImage(urls[i]))
il.finalize(cb)
imageLoaders.append(il)
# Go ahead and fetch the images now
for i in range(len(urls)):
il.images[i].setUrl(urls[i])
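# --- Illustrative usage sketch (not part of the original module) ------------
# loadImages() takes a list of URL strings plus a callback object exposing an
# onImagesLoaded(images) method, as described in the docstring above. The
# class name and URLs below are made up for illustration:
#
#     class GalleryLoaded:
#         def onImagesLoaded(self, images):
#             # all images are loaded; attach them to the page here
#             pass
#
#     loadImages(['images/a.png', 'images/b.png'], GalleryLoaded())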
| andreyvit/pyjamas | library/pyjamas/Canvas/ImageLoader.py | Python | apache-2.0 | 4,113 |
from rest_framework import renderers
from django.utils.xmlutils import SimplerXMLGenerator
from django.utils.six.moves import StringIO
from django.utils.encoding import smart_text
from rest_framework.compat import six
from rest_framework import negotiation
import json
"""
@author: Jon Nordling
@date: 06/19/2016
XFormListRenderer is a custom Django REST Framework renderer that,
given a data object, serializes the data to XML for
the views.
"""
class MediaFileContentNegotiation(negotiation.DefaultContentNegotiation):
def filter_renderers(self, renderers, format):
"""
If there is a '.json' style format suffix, filter the renderers
        so that we only negotiate against those that accept that format.
If there is no renderer available, we use MediaFileRenderer.
"""
renderers = [renderer for renderer in renderers
if renderer.format == format]
if not renderers:
renderers = [MediaFileRenderer()]
return renderers
class MediaFileRenderer(renderers.BaseRenderer):
media_type = '*/*'
format = None
charset = None
render_style = 'binary'
def render(self, data, accepted_media_type=None, renderer_context=None):
return data
class XFormListRenderer(renderers.BaseRenderer):
"""
Renderer which serializes to XML.
"""
media_type = 'text/xml'
format = 'xml'
charset = 'utf-8'
root_node = 'xforms'
element_node = 'xform'
xmlns = "http://openrosa.org/xforms/xformsList"
def render(self, data, accepted_media_type=None, renderer_context=None):
"""
Renders *obj* into serialized XML.
"""
if data is None:
return ''
elif isinstance(data, six.string_types):
return data
stream = StringIO()
xml = SimplerXMLGenerator(stream, self.charset)
xml.startDocument()
xml.startElement(self.root_node, {'xmlns': self.xmlns})
self._to_xml(xml, data)
xml.endElement(self.root_node)
xml.endDocument()
return stream.getvalue()
def _to_xml(self, xml, data):
if isinstance(data, (list, tuple)):
for item in data:
xml.startElement(self.element_node, {})
self._to_xml(xml, item)
xml.endElement(self.element_node)
elif isinstance(data, dict):
for key, value in six.iteritems(data):
xml.startElement(key, {})
self._to_xml(xml, value)
xml.endElement(key)
elif data is None:
# Don't output any value
pass
else:
xml.characters(smart_text(data))
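# --- Illustrative sketch (not part of the original app) ---------------------
# Given a list of dicts, XFormListRenderer wraps each item in an <xform>
# element under an <xforms> root. The field names below are made up for
# illustration:
#
#     renderer = XFormListRenderer()
#     xml = renderer.render([{'formID': 'site_survey', 'name': 'Site Survey'}])
#
# which yields roughly (whitespace added for readability):
#
#     <?xml version="1.0" encoding="utf-8"?>
#     <xforms xmlns="http://openrosa.org/xforms/xformsList">
#       <xform><formID>site_survey</formID><name>Site Survey</name></xform>
#     </xforms>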
| Cadasta/cadasta-geoodk | xforms/renderers.py | Python | agpl-3.0 | 2,727 |
import pytest
import base64
import functools
from unittest import mock
from tests.utils import async
from waterbutler.core import streams
class TestBase64Stream:
@async
def test_doesnt_crash_with_none(self):
stream = streams.Base64EncodeStream(streams.StringStream(b''))
data = yield from stream.read()
assert data == b''
@async
def test_read(self):
data = b'this is a test'
expected = base64.b64encode(data)
stream = streams.Base64EncodeStream(streams.StringStream(data))
actual = yield from stream.read()
assert expected == actual
@async
def test_chunking(self):
for chunk_size in range(1, 10):
data = b'the ode to carp'
expected = streams.StringStream(base64.b64encode(data))
stream = streams.Base64EncodeStream(streams.StringStream(data))
hoped = yield from expected.read(chunk_size)
while hoped:
actual = yield from stream.read(chunk_size)
assert actual == hoped
hoped = yield from expected.read(chunk_size)
left_overs = yield from stream.read()
assert left_overs == b''
def test_size(self):
data = b'the ode to carp'
expected = base64.b64encode(data)
stream = streams.Base64EncodeStream(streams.StringStream(data))
assert len(expected) == int(stream.size)
| icereval/waterbutler | tests/core/streams/test_base64encodestream.py | Python | apache-2.0 | 1,443 |
# -*- coding: utf-8 -*-
"""
Copyright 2013 Olivier Cortès <oc@1flow.io>
This file is part of the 1flow project.
1flow is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
1flow is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public
License along with 1flow. If not, see http://www.gnu.org/licenses/
"""
import warnings
with warnings.catch_warnings():
# These warnings are produced by external packages
# that we use in 1flow. We have no time to fix them
# ourselves, and their developers didn't update their
# own code. BUMMER!
warnings.filterwarnings("ignore", category=DeprecationWarning)
import django.conf.urls.defaults # NOQA
import django.utils.hashcompat # NOQA
try:
import jpype # NOQA
except ImportError:
# In case we import on a machine where there is no JVM,
# because JVM is installed only on 'medium' workers. In
# fact, the standard install is completely broken (we
# need to create symlinks for it to work properly), and
        # we should avoid crashing here.
pass
| bladebo/1flow | oneflow/settings/snippets/000_nobother.py | Python | agpl-3.0 | 1,544 |
from threading import local
try:
from django.utils.module_loading import import_string as import_func
except ImportError:
from django.utils.module_loading import import_by_path as import_func
from .settings import DATA_BACKEND, DATA_BACKEND_PARAMS
_THREAD_LOCAL = local()
_THREAD_ATTR_LOGGERS = 'logexpose_loggers'
_THREAD_ATTR_BACKENDS = 'logexpose_backends'
def import_class(cls_path):
return import_func(cls_path)
def get_backend():
return import_class(DATA_BACKEND).get_from_thread(DATA_BACKEND_PARAMS)
def thread_get_backend(alias):
return thread_get_obj(_THREAD_ATTR_BACKENDS, alias)
def thread_get_logger(alias):
return thread_get_obj(_THREAD_ATTR_LOGGERS, alias)
def thread_init_backend(alias, obj):
return thread_init_obj(_THREAD_ATTR_BACKENDS, obj, alias)
def thread_init_logger(alias, obj):
return thread_init_obj(_THREAD_ATTR_LOGGERS, obj, alias)
def thread_get_obj(attr_name, alias):
return getattr(_THREAD_LOCAL, attr_name, {}).get(alias)
def thread_init_obj(attr_name, obj, alias):
registry = getattr(_THREAD_LOCAL, attr_name, None)
if registry is None:
registry = {}
setattr(_THREAD_LOCAL, attr_name, registry)
registry[alias] = obj
return obj
def get_func_path(func):
return '%s.%s' % (func.__module__, func.__name__)
| idlesign/django-logexpose | logexpose/utils.py | Python | bsd-3-clause | 1,331 |
#!/usr/bin/env python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example runs a typical daily inventory report."""
import tempfile
# Import appropriate modules from the client library.
from googleads import ad_manager
from googleads import errors
def main(client):
# Initialize a DataDownloader.
report_downloader = client.GetDataDownloader(version='v202108')
# Create report job.
report_job = {
'reportQuery': {
'dimensions': ['DATE', 'AD_UNIT_NAME'],
'adUnitView': 'HIERARCHICAL',
'columns': ['AD_SERVER_IMPRESSIONS', 'AD_SERVER_CLICKS',
'ADSENSE_LINE_ITEM_LEVEL_IMPRESSIONS',
'ADSENSE_LINE_ITEM_LEVEL_CLICKS',
'TOTAL_LINE_ITEM_LEVEL_IMPRESSIONS',
'TOTAL_LINE_ITEM_LEVEL_CPM_AND_CPC_REVENUE'],
'dateRangeType': 'LAST_WEEK'
}
}
try:
# Run the report and wait for it to finish.
report_job_id = report_downloader.WaitForReport(report_job)
except errors.AdManagerReportError as e:
print('Failed to generate report. Error was: %s' % e)
# Change to your preferred export format.
export_format = 'CSV_DUMP'
report_file = tempfile.NamedTemporaryFile(suffix='.csv.gz', delete=False)
# Download report data.
report_downloader.DownloadReportToFile(
report_job_id, export_format, report_file)
report_file.close()
# Display results.
print('Report job with id "%s" downloaded to:\n%s' % (
report_job_id, report_file.name))
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client)
| googleads/googleads-python-lib | examples/ad_manager/v202108/report_service/run_inventory_report.py | Python | apache-2.0 | 2,232 |
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.gis.geos import fromstr
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext, string_concat
from shoppleyuser.utils import sms_notify, pretty_date, parse_phone_number
from shoppleyuser.models import Customer, Merchant, ShoppleyUser, CustomerPhone, MerchantPhone, Location
from offer.utils import gen_offer_code, gen_random_pw, gen_tracking_code, pretty_datetime, TxtTemplates
from sorl.thumbnail import ImageField
import logicaldelete.models
from datetime import datetime, timedelta
import random, string, time
#import logicaldelete.models.Model
SMS_DEBUG = settings.SMS_DEBUG
# Create your models here.
class Feature(models.Model):
"""
Featured location that users can see in their offer
    home page so that they may add it to their preferred merchants.
"""
merchant = models.ForeignKey(Merchant, related_name="featured")
time_stamp = models.DateField()
description = models.TextField()
def __unicode__(self):
return self.merchant.name
class Offer(logicaldelete.models.Model):
merchant = models.ForeignKey(Merchant, related_name="offers_published")
title = models.CharField(max_length=128, blank=True, help_text="Sexy offer headline. Keep it under 100 characters.")
description = models.TextField(blank=True)
percentage = models.IntegerField(verbose_name="Percent off (%)", default=0)
dollar_off = models.FloatField(verbose_name="Dollar off ($)", default=0)
offer_value = models.FloatField(verbose_name="Offer Value ($)", default =0)
time_stamp = models.DateTimeField()
starting_time = models.DateTimeField(blank=True, null=True)
duration = models.IntegerField(default=90)
max_offers = models.IntegerField(verbose_name="Max # of customers", default=50, help_text="Maximum number of customers you want to send to")
num_init_sentto = models.IntegerField(default=0) # number of customers the offer was sent to
num_resent_to = models.IntegerField(default=0) # number of customers the offer was resent to
    is_merchant_txted = models.BooleanField(default=False)  # True if the merchant was informed of the offer's status after it expired
img = ImageField(upload_to='offers/', blank=True)
expired = models.BooleanField(default=False)
expired_time = models.DateTimeField(null=True, blank=True)
redistributable = models.BooleanField(default=True)
is_processing = models.BooleanField(default=True)
redistribute_processing = models.BooleanField(default=False)
#locations = models.ManyToManyField(Location)
location_created = models.ForeignKey(Location, null=True, blank=True)
starter_phone = models.ForeignKey(MerchantPhone, null =True)
def __unicode__(self):
return self.title
def update_expired_codes(self):
for oc in self.offercode_set.all():
oc.code = oc.code + self.trackingcode.code
oc.save()
def expire(self, reset_duration=False):
"""
expire the offer
"""
#self.expired = True
if reset_duration:
# shorten duration manually
self.duration = 0
self.save()
def is_active(self):
# print "description: ",self.description
if self.expired_time:
active = self.expired_time > datetime.now()
else:
active = self.starting_time+timedelta(minutes=self.duration) > datetime.now()
if not active:
self.expire()
return active
def num_forwarded(self):
return self.offercode_set.count()-self.num_init_sentto-self.num_resent_to
def num_direct_received(self):
return self.num_init_sentto+self.num_resent_to
def num_redeemed(self):
return self.offercode_set.filter(redeem_time__isnull=False).count()
def num_received(self):
return self.offercode_set.count()
def set_location_from_latlon(self, lat , lon):
        self.location_created = Location.objects.create(location=(fromstr("POINT(%s %s)" % (lon, lat))))
self.save()
def offer_detail(self, past=False):
"""
Used to report to merchant mobile phone
"""
data = {}
data["offer_id"] = self.id
data["title"] = self.title
data["description"] = self.description
if self.dollar_off != 0:
data["amount"] = self.dollar_off
data["unit"] = 1
elif self.percentage != 0:
data["amount"] = self.percentage
data["unit"] = 2
data["duration"] = self.duration
expire_time = self.starting_time + timedelta(minutes=self.duration)
data["expires"] = int(time.mktime(expire_time.timetuple())) #pretty_date(expire_time-datetime.now())
# currently received does not account for forwarded code
#data["total_received"] = self.num_received()
recvd = self.num_direct_received()
data["received"] = recvd
redeemed = self.num_redeemed()
data["redeemed"] = self.num_redeemed()
if recvd == 0:
data["redeem_rate"] = 0
else:
data["redeem_rate"] = redeemed/float(recvd)*100
data["img"] = self.get_image()
if not past:
data["redistributable"] = self.redistributable
data["is_processing"] = self.is_processing
data["redistribute_processing"] = self.redistribute_processing
return data
def customer_offer_detail(self, user):
"""
Used to report to customer mobile phone
"""
location = self.merchant.location.location
offer_detail = {
"offer_id": self.id,
"name": self.title,
"merchant_name": self.merchant.business_name,
"description": self.description,
# expires for older version only
"expires": pretty_date(self.expired_time - datetime.now()),
"expires_time": int(time.mktime(self.expired_time.timetuple())),
"phone": self.merchant.phone,
"address1": self.merchant.address_1,
"citystatezip": self.merchant.zipcode.citystate(),
"lat": location.y,
"lon": location.x,
"img": self.get_image(),
"banner": self.merchant.get_banner(),
}
if self.percentage:
offer_detail["percentage"] = self.percentage
elif self.dollar_off:
offer_detail["dollar_off"] = self.dollar_off
try:
offercode = OfferCode.objects.get(offer=self, customer=user)
offer_detail["offer_code_id"] = offercode.id
offer_detail["code"] = offercode.code
if offercode.forwarder:
offer_detail["forwarder"] = str(offercode.forwarder)
except OfferCode.DoesNotExist:
pass
return offer_detail
def get_image(self):
if self.img:
return self.img.url
else:
return settings.DEFAULT_OFFER_IMG_URL
def gen_tracking_code(self):
track_code = gen_tracking_code()
while TrackingCode.objects.filter(code__iexact=track_code):
track_code = gen_tracking_code()
TrackingCode.objects.create(
offer=self,
code = track_code
)
return track_code
def gen_offer_code(self, customer, rtype = 1):
if self.offercode_set.filter(customer=customer).exists():
return
gen_code = gen_offer_code().lower()
while self.offercode_set.filter(code__iexact=gen_code):
gen_code = gen_offer_code()
if rtype != 4 and self.redistribute_processing == True:
rtype = 2
self.offercode_set.create (
customer=customer,
code=gen_code,
time_stamp=self.time_stamp,
expiration_time=self.starting_time+timedelta(minutes=self.duration),
rtype = rtype,
)
def gen_offer_codes(self, customers):
"""
TODO: this part needs to be optimized so that the offer code generation
        does not have a bottleneck
"""
count=0
for customer in customers:
if customer.is_taking_offers():
if self.offercode_set.filter(customer = customer).exists():
continue
self.gen_offer_code(customer)
count = count +1
#print count, customer
customer.update_offer_count()
#print "reached " , count
return count
def gen_forward_offercode(self,original_code,phone):
#forwarder = OfferCode.objects.filter(code__iexact=original_code)
gen_code = gen_offer_code()
phone =parse_phone_number(phone)
while (OfferCode.objects.filter(code__iexact=gen_code).count()>0):
gen_code = gen_offer_code()
forwarder=original_code.customer
#friend = Customer.objects.get(phone=(phone))
print phone
if CustomerPhone.objects.filter(number=phone).exists():
p = CustomerPhone.objects.get(number=phone)
#if p.shoppleyuser.is_customer():
friend = p.customer
o=self.offercode_set.create(
customer=friend,
code = gen_code,
forwarder=forwarder,
time_stamp=datetime.now(),
expiration_time=original_code.expiration_time,
rtype = 3)
o.save()
forwarder.customer_friends.add(p.customer)
return o, None # for existing customer
# except Customer.DoesNotExist:
# TODO: Need to replace the following with code below
# create a customer
# create a username with phone num and create random password
#print "Creating NEW user with username:", phone
u, created = User.objects.get_or_create(username=phone)
u.email=""
s = string.letters+string.digits
rand_passwd = ''.join(random.sample(s,6))
u.set_password(rand_passwd)
u.save()
friend, created = Customer.objects.get_or_create(user=u, address_1="", address_2="", zipcode=original_code.customer.zipcode)
p= CustomerPhone.objects.create(number = phone, customer = friend)
if created:
friend.set_location_from_address()
# send out a new offercode
o=self.offercode_set.create(
customer = friend,
code = gen_code,
forwarder=forwarder,
time_stamp=datetime.now(),
expiration_time=original_code.expiration_time,
rtype = 3)
o.save()
forwarder.customer_friends.add(friend)
return o, rand_passwd # for new customer
def redistribute(self):
"""
checks if offer is redistributable and sets flags to queue for
redistribution and return True
if not redistributable then return False
"""
if self.redistributable:
self.redistribute_processing = True
self.redistributable = False
self.save()
return True
else:
return False
def redeemers(self):
"""
return list of redeemers
"""
return self.offercode_set.filter(redeem_time__isnull=False)
## keep track of how many forwardings a customer has initiated on an offer
class ForwardState(models.Model):
offer = models.ForeignKey(Offer)
customer = models.ForeignKey(Customer)
remaining = models.IntegerField(default=settings.MAX_FORWARDS)
def is_reach_limit(self):
return self.remaining<=0
def update(self):
self.remaining=self.remaining-1
self.save()
def allowed_forwards(self,requested_forwards):
if self.remaining >=requested_forwards:
return requested_forwards
else:
if self.is_reach_limit():
return 0
else:
return self.remaining
class OfferCode(models.Model):
offer = models.ForeignKey(Offer)
forwarder = models.ForeignKey(Customer,related_name="forwarder", null=True)
# TODO: why is customer null=True
customer = models.ForeignKey(Customer)
code = models.CharField(max_length=32)
txn_amount = models.FloatField(default=0)
time_stamp = models.DateTimeField()
redeem_time = models.DateTimeField(null=True, blank=True)
expiration_time = models.DateTimeField()
feedback = models.TextField()
# 1 through 5, 0 is unrated
rating = models.IntegerField(default=0)
TYPES= (
(1, "Distributed"),
(2, "Reditributed"),
(3, "Forwarded"),
(4, "Manual"),
)
rtype = models.IntegerField(default=1, choices = TYPES, null=True, blank=True)
def is_valid(self):
return datetime.now() < time_stamp + timedelta(minutes=self.offer.duration)
def is_redeemed(self):
return False if self.redeem_time is None else True
def redeem(self):
self.redeem_time = datetime.now()
self.save()
def __unicode__(self):
return self.code + "\n -customer:" + str(self.customer)+"\n -description:"+str(self.offer.description)
def offer_detail(self):
"""
Used to report to customer mobile phone
"""
location = self.offer.merchant.location.location
offer_detail = {"offer_code_id": self.id,
"offer_id": self.offer.id,
"code": self.code,
"name": self.offer.title,
"merchant_name": self.offer.merchant.business_name,
"description": self.offer.description,
"expires": pretty_date(self.expiration_time-datetime.now()),
"expires_time": int(time.mktime(self.expiration_time.timetuple())),
"phone": self.offer.merchant.phone,
"address1": self.offer.merchant.address_1,
"citystatezip": self.offer.merchant.zipcode.citystate(),
"lat": location.y,
"lon": location.x,
"img": self.offer.get_image(),
"banner": self.offer.merchant.get_banner()
}
if self.offer.percentage:
offer_detail["percentage"] = self.offer.percentage
elif self.offer.dollar_off:
offer_detail["dollar_off"] = self.offer.dollar_off
if self.forwarder:
offer_detail["forwarder"] = str(self.forwarder)
return offer_detail
def offer_redeemed(self):
"""
Used to report to customer mobile phone
"""
location = self.offer.merchant.location.location
offer_detail = {"offer_code_id": self.id,
"offer_id": self.offer.id,
"code": self.code,
"name": self.offer.title,
"merchant_name": self.offer.merchant.business_name,
"description": self.offer.description,
"redeemed": self.redeem_time.strftime("%m-%d-%y %H:%M"),
"redeemed_time": int(time.mktime(self.redeem_time.timetuple())),
"txn_amount": "%.2f"%self.txn_amount,
"feedback": self.feedback,
"rating": self.rating,
"phone": self.offer.merchant.phone,
"address1": self.offer.merchant.address_1,
"citystatezip": self.offer.merchant.zipcode.citystate(),
"lat": location.y,
"lon": location.x,
"img": self.offer.get_image(),
"banner": self.offer.merchant.get_banner(),
"expires_time": int(time.mktime(self.expiration_time.timetuple())),
}
if self.offer.percentage:
offer_detail["percentage"] = self.offer.percentage
elif self.offer.dollar_off:
offer_detail["dollar_off"] = self.offer.dollar_off
if self.forwarder:
offer_detail["forwarder"] = str(self.forwarder)
return offer_detail
class OfferCodeAbnormal(models.Model):
ABNORMAL_TYPE = (
("IV", "Invalid Code"),
("DR", "Double redemption"),
("IR", "Internal referral"),
("ER", "External referral"),
)
time_stamp = models.DateTimeField()
ab_type = models.CharField(max_length=2, choices=ABNORMAL_TYPE)
offercode = models.ForeignKey(OfferCode, blank=True, null=True)
invalid_code = models.CharField(max_length=32, blank=True)
referred_customer = models.ForeignKey(Customer, blank=True, null=True)
referred_phone = models.CharField(max_length=20, blank=True, null=True)
def __unicode__(self):
if self.ab_type == "IV":
return "Invalid code: %s" % self.invalid_code
else:
return self.ab_type
class Transaction(models.Model):
TRANS_TYPE = (
("MB", "Merchant To Bank"),
("BM", "Bank To Merchant"),
("CB", "Customer to Bank"),
("BC", "Bank to Customer"),
("MM", "Merchant to Merchant"),
("MC", "Merchant to Customer"),
("CM", "Customer to Merchant"),
("CC", "Customer to Customer"),
)
time_stamp = models.DateTimeField()
trans_type = models.CharField(max_length=2, choices=TRANS_TYPE)
amount = models.IntegerField(default=1)
src = models.ForeignKey(ShoppleyUser, null=True, blank=True, related_name="transactions_originated")
dst = models.ForeignKey(ShoppleyUser, null=True, blank=True, related_name="transctions_received")
def __unicode__(self):
return "%d points from %s to %s" % (self.amount, self.src, self.dst)
class TrackingCode(models.Model):
offer = models.OneToOneField(Offer)
code = models.CharField(max_length=32)
def __unicode__(self):
return "code: %s for offer: %s" % (self.code, self.offer)
class Vote(models.Model):
customer = models.ForeignKey(Customer)
offer = models.ForeignKey(Offer)
VOTE_CHOICES = (
(1, "yay"),
(-1,"nay"),
(0, "pending"),
)
vote = models.IntegerField(default=0, choices = VOTE_CHOICES)
time_stamp = models.DateTimeField()
def __unicode__(self):
return "%s: %s -> %s" % (self.vote, self.customer, self.offer)
class BlackListWord(logicaldelete.models.Model):
word = models.CharField(max_length=128)
def __unicode__(self):
return word
class BlackListOffer(logicaldelete.models.Model):
offer = models.ForeignKey(Offer)
words = models.ManyToManyField(BlackListWord)
def __unicode__(self):
return offer
| kwantopia/shoppley-migrate | shoppley.com/shoppley/apps/offer/models.py | Python | mit | 16,256 |
import unittest
from netengine.backends.ssh import OpenWRT
from ..settings import settings
__all__ = ['TestSSHOpenWRT']
class TestSSHOpenWRT(unittest.TestCase):
def setUp(self):
self.host = settings['openwrt-ssh']['host']
self.username = settings['openwrt-ssh']['username']
self.password = settings['openwrt-ssh']['password']
self.port = settings['openwrt-ssh'].get('port', 22)
self.device = OpenWRT(self.host, self.username, self.password, self.port)
self.device.connect()
def test_properties(self):
device = self.device
device.os
device.name
device.olsr
device.disconnect()
def test_wireless_mode(self):
self.assertTrue(self.device.wireless_mode in ['ap', 'sta'])
def test_RAM_total(self):
self.assertTrue(type(self.device.RAM_total) == int)
def test_uptime(self):
self.assertTrue(type(self.device.uptime) == int)
def test_interfaces_to_dict(self):
self.assertTrue(type(self.device.interfaces_to_dict) == dict)
def test_uptime_tuple(self):
self.assertTrue(type(self.device.uptime_tuple) == tuple)
def test_to_dict(self):
self.assertTrue(isinstance(self.device.to_dict(), dict))
def test_filter_radio_interfaces(self):
self.assertTrue(isinstance(self.device._filter_radio_interfaces(), dict))
def test_filter_radio(self):
self.assertTrue(isinstance(self.device._filter_radio(), dict))
def test_manufacturer(self):
self.assertTrue(type(self.device.manufacturer) == str)
def test_filter_routing_protocols(self):
self.assertTrue(isinstance(self.device._filter_routing_protocols(), list))
| ninuxorg/netengine | tests/ssh/openwrt.py | Python | mit | 1,774 |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides a variety of device interactions based on adb.
Eventually, this will be based on adb_wrapper.
"""
# pylint: disable=unused-argument
import calendar
import collections
import fnmatch
import itertools
import json
import logging
import multiprocessing
import os
import posixpath
import pprint
import random
import re
import shutil
import stat
import tempfile
import time
import threading
import uuid
import zipfile
from devil import base_error
from devil import devil_env
from devil.utils import cmd_helper
from devil.android import apk_helper
from devil.android import device_signal
from devil.android import decorators
from devil.android import device_errors
from devil.android import device_temp_file
from devil.android import install_commands
from devil.android import logcat_monitor
from devil.android import md5sum
from devil.android.sdk import adb_wrapper
from devil.android.sdk import intent
from devil.android.sdk import keyevent
from devil.android.sdk import split_select
from devil.android.sdk import version_codes
from devil.utils import host_utils
from devil.utils import parallelizer
from devil.utils import reraiser_thread
from devil.utils import timeout_retry
from devil.utils import zip_utils
logger = logging.getLogger(__name__)
_DEFAULT_TIMEOUT = 30
_DEFAULT_RETRIES = 3
# A sentinel object for default values
# TODO(jbudorick,perezju): revisit how default values are handled by
# the timeout_retry decorators.
DEFAULT = object()
_RESTART_ADBD_SCRIPT = """
trap '' HUP
trap '' TERM
trap '' PIPE
function restart() {
stop adbd
start adbd
}
restart &
"""
# Not all permissions can be set.
_PERMISSIONS_BLACKLIST_RE = re.compile('|'.join(fnmatch.translate(p) for p in [
'android.permission.ACCESS_LOCATION_EXTRA_COMMANDS',
'android.permission.ACCESS_MOCK_LOCATION',
'android.permission.ACCESS_NETWORK_STATE',
'android.permission.ACCESS_NOTIFICATION_POLICY',
'android.permission.ACCESS_WIFI_STATE',
'android.permission.AUTHENTICATE_ACCOUNTS',
'android.permission.BLUETOOTH',
'android.permission.BLUETOOTH_ADMIN',
'android.permission.BROADCAST_STICKY',
'android.permission.CHANGE_NETWORK_STATE',
'android.permission.CHANGE_WIFI_MULTICAST_STATE',
'android.permission.CHANGE_WIFI_STATE',
'android.permission.DISABLE_KEYGUARD',
'android.permission.DOWNLOAD_WITHOUT_NOTIFICATION',
'android.permission.EXPAND_STATUS_BAR',
'android.permission.GET_PACKAGE_SIZE',
'android.permission.INSTALL_SHORTCUT',
'android.permission.INJECT_EVENTS',
'android.permission.INTERNET',
'android.permission.KILL_BACKGROUND_PROCESSES',
'android.permission.MANAGE_ACCOUNTS',
'android.permission.MODIFY_AUDIO_SETTINGS',
'android.permission.NFC',
'android.permission.READ_SYNC_SETTINGS',
'android.permission.READ_SYNC_STATS',
'android.permission.RECEIVE_BOOT_COMPLETED',
'android.permission.RECORD_VIDEO',
'android.permission.REORDER_TASKS',
'android.permission.REQUEST_INSTALL_PACKAGES',
'android.permission.RESTRICTED_VR_ACCESS',
'android.permission.RUN_INSTRUMENTATION',
'android.permission.SET_ALARM',
'android.permission.SET_TIME_ZONE',
'android.permission.SET_WALLPAPER',
'android.permission.SET_WALLPAPER_HINTS',
'android.permission.TRANSMIT_IR',
'android.permission.USE_CREDENTIALS',
'android.permission.USE_FINGERPRINT',
'android.permission.VIBRATE',
'android.permission.WAKE_LOCK',
'android.permission.WRITE_SYNC_SETTINGS',
'com.android.browser.permission.READ_HISTORY_BOOKMARKS',
'com.android.browser.permission.WRITE_HISTORY_BOOKMARKS',
'com.android.launcher.permission.INSTALL_SHORTCUT',
'com.chrome.permission.DEVICE_EXTRAS',
'com.google.android.apps.now.CURRENT_ACCOUNT_ACCESS',
'com.google.android.c2dm.permission.RECEIVE',
'com.google.android.providers.gsf.permission.READ_GSERVICES',
'com.sec.enterprise.knox.MDM_CONTENT_PROVIDER',
'*.permission.C2D_MESSAGE',
'*.permission.READ_WRITE_BOOKMARK_FOLDERS',
'*.TOS_ACKED',
]))
_SHELL_OUTPUT_SEPARATOR = '~X~'
_PERMISSIONS_EXCEPTION_RE = re.compile(
r'java\.lang\.\w+Exception: .*$', re.MULTILINE)
_CURRENT_FOCUS_CRASH_RE = re.compile(
r'\s*mCurrentFocus.*Application (Error|Not Responding): (\S+)}')
_GETPROP_RE = re.compile(r'\[(.*?)\]: \[(.*?)\]')
# Regex to parse the long (-l) output of 'ls' command, c.f.
# https://github.com/landley/toybox/blob/master/toys/posix/ls.c#L446
_LONG_LS_OUTPUT_RE = re.compile(
r'(?P<st_mode>[\w-]{10})\s+' # File permissions
r'(?:(?P<st_nlink>\d+)\s+)?' # Number of links (optional)
r'(?P<st_owner>\w+)\s+' # Name of owner
r'(?P<st_group>\w+)\s+' # Group of owner
r'(?:' # Either ...
r'(?P<st_rdev_major>\d+),\s+' # Device major, and
r'(?P<st_rdev_minor>\d+)\s+' # Device minor
r'|' # .. or
r'(?P<st_size>\d+)\s+' # Size in bytes
r')?' # .. or nothing
r'(?P<st_mtime>\d{4}-\d\d-\d\d \d\d:\d\d)\s+' # Modification date/time
r'(?P<filename>.+?)' # File name
r'(?: -> (?P<symbolic_link_to>.+))?' # Symbolic link (optional)
r'$' # End of string
)
_LS_DATE_FORMAT = '%Y-%m-%d %H:%M'
_FILE_MODE_RE = re.compile(r'[dbclps-](?:[r-][w-][xSs-]){2}[r-][w-][xTt-]$')
_FILE_MODE_KIND = {
'd': stat.S_IFDIR, 'b': stat.S_IFBLK, 'c': stat.S_IFCHR,
'l': stat.S_IFLNK, 'p': stat.S_IFIFO, 's': stat.S_IFSOCK,
'-': stat.S_IFREG}
_FILE_MODE_PERMS = [
stat.S_IRUSR, stat.S_IWUSR, stat.S_IXUSR,
stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP,
stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH,
]
_FILE_MODE_SPECIAL = [
('s', stat.S_ISUID),
('s', stat.S_ISGID),
('t', stat.S_ISVTX),
]
_SELINUX_MODE = {
'enforcing': True,
'permissive': False,
'disabled': None
}
# Some devices require different logic for checking if root is necessary
_SPECIAL_ROOT_DEVICE_LIST = [
'marlin',
'sailfish',
]
_IMEI_RE = re.compile(r' Device ID = (.+)$')
# The following regex is used to match result parcels like:
"""
Result: Parcel(
0x00000000: 00000000 0000000f 00350033 00360033 '........3.5.3.6.'
0x00000010: 00360032 00370030 00300032 00300039 '2.6.0.7.2.0.9.0.'
0x00000020: 00380033 00000039 '3.8.9... ')
"""
_PARCEL_RESULT_RE = re.compile(
r'0x[0-9a-f]{8}\: (?:[0-9a-f]{8}\s+){1,4}\'(.{16})\'')
_EBUSY_RE = re.compile(
r'mkdir failed for ([^,]*), Device or resource busy')
@decorators.WithExplicitTimeoutAndRetries(
_DEFAULT_TIMEOUT, _DEFAULT_RETRIES)
def GetAVDs():
"""Returns a list of Android Virtual Devices.
Returns:
A list containing the configured AVDs.
"""
lines = cmd_helper.GetCmdOutput([
os.path.join(devil_env.config.LocalPath('android_sdk'),
'tools', 'android'),
'list', 'avd']).splitlines()
avds = []
for line in lines:
if 'Name:' not in line:
continue
key, value = (s.strip() for s in line.split(':', 1))
if key == 'Name':
avds.append(value)
return avds
@decorators.WithExplicitTimeoutAndRetries(
_DEFAULT_TIMEOUT, _DEFAULT_RETRIES)
def RestartServer():
"""Restarts the adb server.
Raises:
CommandFailedError if we fail to kill or restart the server.
"""
def adb_killed():
return not adb_wrapper.AdbWrapper.IsServerOnline()
def adb_started():
return adb_wrapper.AdbWrapper.IsServerOnline()
adb_wrapper.AdbWrapper.KillServer()
if not timeout_retry.WaitFor(adb_killed, wait_period=1, max_tries=5):
    # TODO(perezju): raise an exception after fixing http://crbug.com/442319
logger.warning('Failed to kill adb server')
adb_wrapper.AdbWrapper.StartServer()
if not timeout_retry.WaitFor(adb_started, wait_period=1, max_tries=5):
raise device_errors.CommandFailedError('Failed to start adb server')
def _ParseModeString(mode_str):
"""Parse a mode string, e.g. 'drwxrwxrwx', into a st_mode value.
Effectively the reverse of |mode_to_string| in, e.g.:
https://github.com/landley/toybox/blob/master/lib/lib.c#L896
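  For instance (illustrative), _ParseModeString('drwxr-xr-x') yields
  stat.S_IFDIR | 0755.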
"""
if not _FILE_MODE_RE.match(mode_str):
    raise ValueError('Unexpected file mode %r' % mode_str)
mode = _FILE_MODE_KIND[mode_str[0]]
for c, flag in zip(mode_str[1:], _FILE_MODE_PERMS):
if c != '-' and c.islower():
mode |= flag
for c, (t, flag) in zip(mode_str[3::3], _FILE_MODE_SPECIAL):
if c.lower() == t:
mode |= flag
return mode
def _GetTimeStamp():
"""Return a basic ISO 8601 time stamp with the current local time."""
return time.strftime('%Y%m%dT%H%M%S', time.localtime())
def _JoinLines(lines):
# makes sure that the last line is also terminated, and is more memory
# efficient than first appending an end-line to each line and then joining
# all of them together.
return ''.join(s for line in lines for s in (line, '\n'))
def _CreateAdbWrapper(device):
if isinstance(device, adb_wrapper.AdbWrapper):
return device
else:
return adb_wrapper.AdbWrapper(device)
def _FormatPartialOutputError(output):
lines = output.splitlines() if isinstance(output, basestring) else output
message = ['Partial output found:']
if len(lines) > 11:
message.extend('- %s' % line for line in lines[:5])
    message.append('<snip>')
message.extend('- %s' % line for line in lines[-5:])
else:
message.extend('- %s' % line for line in lines)
return '\n'.join(message)
class DeviceUtils(object):
_MAX_ADB_COMMAND_LENGTH = 512
_MAX_ADB_OUTPUT_LENGTH = 32768
_LAUNCHER_FOCUSED_RE = re.compile(
r'\s*mCurrentFocus.*(Launcher|launcher).*')
_VALID_SHELL_VARIABLE = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*$')
LOCAL_PROPERTIES_PATH = posixpath.join('/', 'data', 'local.prop')
# Property in /data/local.prop that controls Java assertions.
JAVA_ASSERT_PROPERTY = 'dalvik.vm.enableassertions'
def __init__(self, device, enable_device_files_cache=False,
default_timeout=_DEFAULT_TIMEOUT,
default_retries=_DEFAULT_RETRIES):
"""DeviceUtils constructor.
Args:
device: Either a device serial, an existing AdbWrapper instance, or an
        existing AndroidCommands instance.
enable_device_files_cache: For PushChangedFiles(), cache checksums of
pushed files rather than recomputing them on a subsequent call.
default_timeout: An integer containing the default number of seconds to
wait for an operation to complete if no explicit value is provided.
      default_retries: An integer containing the default number of times an
operation should be retried on failure if no explicit value is provided.
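    Example (illustrative; the serial number below is a placeholder):
      device = DeviceUtils('0123456789abcdef', default_timeout=60)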
"""
self.adb = None
if isinstance(device, basestring):
self.adb = _CreateAdbWrapper(device)
elif isinstance(device, adb_wrapper.AdbWrapper):
self.adb = device
else:
raise ValueError('Unsupported device value: %r' % device)
self._commands_installed = None
self._default_timeout = default_timeout
self._default_retries = default_retries
self._enable_device_files_cache = enable_device_files_cache
self._cache = {}
self._client_caches = {}
self._cache_lock = threading.RLock()
assert hasattr(self, decorators.DEFAULT_TIMEOUT_ATTR)
assert hasattr(self, decorators.DEFAULT_RETRIES_ATTR)
self._ClearCache()
@property
def serial(self):
"""Returns the device serial."""
return self.adb.GetDeviceSerial()
def __eq__(self, other):
"""Checks whether |other| refers to the same device as |self|.
Args:
other: The object to compare to. This can be a basestring, an instance
of adb_wrapper.AdbWrapper, or an instance of DeviceUtils.
Returns:
Whether |other| refers to the same device as |self|.
"""
return self.serial == str(other)
def __lt__(self, other):
"""Compares two instances of DeviceUtils.
This merely compares their serial numbers.
Args:
other: The instance of DeviceUtils to compare to.
Returns:
Whether |self| is less than |other|.
"""
return self.serial < other.serial
def __str__(self):
"""Returns the device serial."""
return self.serial
@decorators.WithTimeoutAndRetriesFromInstance()
def IsOnline(self, timeout=None, retries=None):
"""Checks whether the device is online.
Args:
timeout: timeout in seconds
retries: number of retries
Returns:
True if the device is online, False otherwise.
Raises:
CommandTimeoutError on timeout.
"""
try:
return self.adb.GetState() == 'device'
except base_error.BaseError as exc:
logger.info('Failed to get state: %s', exc)
return False
@decorators.WithTimeoutAndRetriesFromInstance()
def HasRoot(self, timeout=None, retries=None):
"""Checks whether or not adbd has root privileges.
Args:
timeout: timeout in seconds
retries: number of retries
Returns:
True if adbd has root privileges, False otherwise.
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
try:
if self.product_name in _SPECIAL_ROOT_DEVICE_LIST:
return self.GetProp('service.adb.root') == '1'
self.RunShellCommand(['ls', '/root'], check_return=True)
return True
except device_errors.AdbCommandFailedError:
return False
def NeedsSU(self, timeout=DEFAULT, retries=DEFAULT):
"""Checks whether 'su' is needed to access protected resources.
Args:
timeout: timeout in seconds
retries: number of retries
Returns:
      True if 'su' is available on the device and is needed to access
protected resources; False otherwise if either 'su' is not available
(e.g. because the device has a user build), or not needed (because adbd
already has root privileges).
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
if 'needs_su' not in self._cache:
cmd = '%s && ! ls /root' % self._Su('ls /root')
if self.product_name in _SPECIAL_ROOT_DEVICE_LIST:
if self.HasRoot():
self._cache['needs_su'] = False
return False
cmd = 'which which && which su'
try:
self.RunShellCommand(cmd, shell=True, check_return=True,
timeout=self._default_timeout if timeout is DEFAULT else timeout,
retries=self._default_retries if retries is DEFAULT else retries)
self._cache['needs_su'] = True
except device_errors.AdbCommandFailedError:
self._cache['needs_su'] = False
return self._cache['needs_su']
def _Su(self, command):
if self.build_version_sdk >= version_codes.MARSHMALLOW:
return 'su 0 %s' % command
return 'su -c %s' % command
@decorators.WithTimeoutAndRetriesFromInstance()
def EnableRoot(self, timeout=None, retries=None):
"""Restarts adbd with root privileges.
Args:
timeout: timeout in seconds
retries: number of retries
Raises:
CommandFailedError if root could not be enabled.
CommandTimeoutError on timeout.
"""
if 'needs_su' in self._cache:
del self._cache['needs_su']
try:
self.adb.Root()
except device_errors.AdbCommandFailedError:
if self.IsUserBuild():
raise device_errors.CommandFailedError(
'Unable to root device with user build.', str(self))
else:
raise # Failed probably due to some other reason.
def device_online_with_root():
try:
self.adb.WaitForDevice()
return self.GetProp('service.adb.root', cache=False) == '1'
except (device_errors.AdbCommandFailedError,
device_errors.DeviceUnreachableError):
return False
timeout_retry.WaitFor(device_online_with_root, wait_period=1)
@decorators.WithTimeoutAndRetriesFromInstance()
def IsUserBuild(self, timeout=None, retries=None):
"""Checks whether or not the device is running a user build.
Args:
timeout: timeout in seconds
retries: number of retries
Returns:
True if the device is running a user build, False otherwise (i.e. if
it's running a userdebug build).
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
return self.build_type == 'user'
@decorators.WithTimeoutAndRetriesFromInstance()
def GetExternalStoragePath(self, timeout=None, retries=None):
"""Get the device's path to its SD card.
Args:
timeout: timeout in seconds
retries: number of retries
Returns:
The device's path to its SD card.
Raises:
CommandFailedError if the external storage path could not be determined.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
self._EnsureCacheInitialized()
if not self._cache['external_storage']:
raise device_errors.CommandFailedError('$EXTERNAL_STORAGE is not set',
str(self))
return self._cache['external_storage']
@decorators.WithTimeoutAndRetriesFromInstance()
def GetIMEI(self, timeout=None, retries=None):
"""Get the device's IMEI.
Args:
timeout: timeout in seconds
retries: number of retries
Returns:
The device's IMEI.
Raises:
AdbCommandFailedError on error
"""
if self._cache.get('imei') is not None:
return self._cache.get('imei')
if self.build_version_sdk < 21:
out = self.RunShellCommand(['dumpsys', 'iphonesubinfo'],
raw_output=True, check_return=True)
if out:
match = re.search(_IMEI_RE, out)
if match:
self._cache['imei'] = match.group(1)
return self._cache['imei']
else:
out = self.RunShellCommand(['service', 'call', 'iphonesubinfo', '1'],
check_return=True)
if out:
imei = ''
for line in out:
match = re.search(_PARCEL_RESULT_RE, line)
if match:
imei = imei + match.group(1)
imei = imei.replace('.', '').strip()
if imei:
self._cache['imei'] = imei
return self._cache['imei']
raise device_errors.CommandFailedError('Unable to fetch IMEI.')
@decorators.WithTimeoutAndRetriesFromInstance()
def GetApplicationPaths(self, package, timeout=None, retries=None):
"""Get the paths of the installed apks on the device for the given package.
Args:
package: Name of the package.
Returns:
List of paths to the apks on the device for the given package.
"""
return self._GetApplicationPathsInternal(package)
def _GetApplicationPathsInternal(self, package, skip_cache=False):
cached_result = self._cache['package_apk_paths'].get(package)
if cached_result is not None and not skip_cache:
if package in self._cache['package_apk_paths_to_verify']:
self._cache['package_apk_paths_to_verify'].remove(package)
# Don't verify an app that is not thought to be installed. We are
# concerned only with apps we think are installed having been
# uninstalled manually.
if cached_result and not self.PathExists(cached_result):
cached_result = None
self._cache['package_apk_checksums'].pop(package, 0)
if cached_result is not None:
return list(cached_result)
# 'pm path' is liable to incorrectly exit with a nonzero number starting
# in Lollipop.
# TODO(jbudorick): Check if this is fixed as new Android versions are
# released to put an upper bound on this.
should_check_return = (self.build_version_sdk < version_codes.LOLLIPOP)
output = self.RunShellCommand(
['pm', 'path', package], check_return=should_check_return)
apks = []
bad_output = False
for line in output:
if line.startswith('package:'):
apks.append(line[len('package:'):])
elif line.startswith('WARNING:'):
continue
else:
bad_output = True # Unexpected line in output.
if not apks and output:
if bad_output:
raise device_errors.CommandFailedError(
'Unexpected pm path output: %r' % '\n'.join(output), str(self))
else:
logger.warning('pm returned no paths but the following warnings:')
for line in output:
logger.warning('- %s', line)
self._cache['package_apk_paths'][package] = list(apks)
return apks
@decorators.WithTimeoutAndRetriesFromInstance()
def GetApplicationVersion(self, package, timeout=None, retries=None):
"""Get the version name of a package installed on the device.
Args:
package: Name of the package.
Returns:
A string with the version name or None if the package is not found
on the device.
"""
output = self.RunShellCommand(
['dumpsys', 'package', package], check_return=True)
if not output:
return None
for line in output:
line = line.strip()
if line.startswith('versionName='):
return line[len('versionName='):]
raise device_errors.CommandFailedError(
'Version name for %s not found on dumpsys output' % package, str(self))
@decorators.WithTimeoutAndRetriesFromInstance()
def GetApplicationDataDirectory(self, package, timeout=None, retries=None):
"""Get the data directory on the device for the given package.
Args:
package: Name of the package.
Returns:
The package's data directory.
Raises:
CommandFailedError if the package's data directory can't be found,
whether because it's not installed or otherwise.
"""
output = self._RunPipedShellCommand(
'pm dump %s | grep dataDir=' % cmd_helper.SingleQuote(package))
for line in output:
_, _, dataDir = line.partition('dataDir=')
if dataDir:
return dataDir
raise device_errors.CommandFailedError(
'Could not find data directory for %s', package)
@decorators.WithTimeoutAndRetriesFromInstance()
def WaitUntilFullyBooted(self, wifi=False, timeout=None, retries=None):
"""Wait for the device to fully boot.
This means waiting for the device to boot, the package manager to be
available, and the SD card to be ready. It can optionally mean waiting
for wifi to come up, too.
Args:
wifi: A boolean indicating if we should wait for wifi to come up or not.
timeout: timeout in seconds
retries: number of retries
Raises:
CommandFailedError on failure.
CommandTimeoutError if one of the component waits times out.
DeviceUnreachableError if the device becomes unresponsive.
"""
def sd_card_ready():
try:
self.RunShellCommand(['test', '-d', self.GetExternalStoragePath()],
check_return=True)
return True
except device_errors.AdbCommandFailedError:
return False
def pm_ready():
try:
return self._GetApplicationPathsInternal('android', skip_cache=True)
except device_errors.CommandFailedError:
return False
def boot_completed():
try:
return self.GetProp('sys.boot_completed', cache=False) == '1'
except device_errors.CommandFailedError:
return False
def wifi_enabled():
return 'Wi-Fi is enabled' in self.RunShellCommand(['dumpsys', 'wifi'],
check_return=False)
self.adb.WaitForDevice()
timeout_retry.WaitFor(sd_card_ready)
timeout_retry.WaitFor(pm_ready)
timeout_retry.WaitFor(boot_completed)
if wifi:
timeout_retry.WaitFor(wifi_enabled)
REBOOT_DEFAULT_TIMEOUT = 10 * _DEFAULT_TIMEOUT
@decorators.WithTimeoutAndRetriesFromInstance(
min_default_timeout=REBOOT_DEFAULT_TIMEOUT)
def Reboot(self, block=True, wifi=False, timeout=None, retries=None):
"""Reboot the device.
Args:
block: A boolean indicating if we should wait for the reboot to complete.
wifi: A boolean indicating if we should wait for wifi to be enabled after
the reboot. The option has no effect unless |block| is also True.
timeout: timeout in seconds
retries: number of retries
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
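    Example (illustrative):
      device.Reboot(block=True, wifi=True)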
"""
def device_offline():
return not self.IsOnline()
self.adb.Reboot()
self._ClearCache()
timeout_retry.WaitFor(device_offline, wait_period=1)
if block:
self.WaitUntilFullyBooted(wifi=wifi)
INSTALL_DEFAULT_TIMEOUT = 4 * _DEFAULT_TIMEOUT
@decorators.WithTimeoutAndRetriesFromInstance(
min_default_timeout=INSTALL_DEFAULT_TIMEOUT)
def Install(self, apk, allow_downgrade=False, reinstall=False,
permissions=None, timeout=None, retries=None):
"""Install an APK.
Noop if an identical APK is already installed.
Args:
apk: An ApkHelper instance or string containing the path to the APK.
allow_downgrade: A boolean indicating if we should allow downgrades.
reinstall: A boolean indicating if we should keep any existing app data.
permissions: Set of permissions to set. If not set, finds permissions with
apk helper. To set no permissions, pass [].
timeout: timeout in seconds
retries: number of retries
Raises:
CommandFailedError if the installation fails.
CommandTimeoutError if the installation times out.
DeviceUnreachableError on missing device.
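    Example (illustrative; the APK path is a placeholder):
      device.Install('/path/to/app-release.apk', reinstall=True)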
"""
self._InstallInternal(apk, None, allow_downgrade=allow_downgrade,
reinstall=reinstall, permissions=permissions)
@decorators.WithTimeoutAndRetriesFromInstance(
min_default_timeout=INSTALL_DEFAULT_TIMEOUT)
def InstallSplitApk(self, base_apk, split_apks, allow_downgrade=False,
reinstall=False, allow_cached_props=False,
permissions=None, timeout=None, retries=None):
"""Install a split APK.
Noop if all of the APK splits are already installed.
Args:
base_apk: An ApkHelper instance or string containing the path to the base
APK.
split_apks: A list of strings of paths of all of the APK splits.
allow_downgrade: A boolean indicating if we should allow downgrades.
reinstall: A boolean indicating if we should keep any existing app data.
allow_cached_props: Whether to use cached values for device properties.
permissions: Set of permissions to set. If not set, finds permissions with
apk helper. To set no permissions, pass [].
timeout: timeout in seconds
retries: number of retries
Raises:
CommandFailedError if the installation fails.
CommandTimeoutError if the installation times out.
DeviceUnreachableError on missing device.
DeviceVersionError if device SDK is less than Android L.
"""
self._InstallInternal(base_apk, split_apks, reinstall=reinstall,
allow_cached_props=allow_cached_props,
permissions=permissions,
allow_downgrade=allow_downgrade)
def _InstallInternal(self, base_apk, split_apks, allow_downgrade=False,
reinstall=False, allow_cached_props=False,
permissions=None):
if split_apks:
self._CheckSdkLevel(version_codes.LOLLIPOP)
base_apk = apk_helper.ToHelper(base_apk)
all_apks = [base_apk.path]
if split_apks:
all_apks += split_select.SelectSplits(
self, base_apk.path, split_apks, allow_cached_props=allow_cached_props)
if len(all_apks) == 1:
logger.warning('split-select did not select any from %s', split_apks)
missing_apks = [apk for apk in all_apks if not os.path.exists(apk)]
if missing_apks:
raise device_errors.CommandFailedError(
'Attempted to install non-existent apks: %s'
% pprint.pformat(missing_apks))
package_name = base_apk.GetPackageName()
device_apk_paths = self._GetApplicationPathsInternal(package_name)
apks_to_install = None
host_checksums = None
if not device_apk_paths:
apks_to_install = all_apks
elif len(device_apk_paths) > 1 and not split_apks:
logger.warning(
'Installing non-split APK when split APK was previously installed')
apks_to_install = all_apks
elif len(device_apk_paths) == 1 and split_apks:
logger.warning(
'Installing split APK when non-split APK was previously installed')
apks_to_install = all_apks
else:
try:
apks_to_install, host_checksums = (
self._ComputeStaleApks(package_name, all_apks))
except EnvironmentError as e:
logger.warning('Error calculating md5: %s', e)
apks_to_install, host_checksums = all_apks, None
if apks_to_install and not reinstall:
self.Uninstall(package_name)
apks_to_install = all_apks
if apks_to_install:
# Assume that we won't know the resulting device state.
self._cache['package_apk_paths'].pop(package_name, 0)
self._cache['package_apk_checksums'].pop(package_name, 0)
if split_apks:
partial = package_name if len(apks_to_install) < len(all_apks) else None
self.adb.InstallMultiple(
apks_to_install, partial=partial, reinstall=reinstall,
allow_downgrade=allow_downgrade)
else:
self.adb.Install(
base_apk.path, reinstall=reinstall, allow_downgrade=allow_downgrade)
else:
# Running adb install terminates running instances of the app, so to be
# consistent, we explicitly terminate it when skipping the install.
self.ForceStop(package_name)
if (permissions is None
and self.build_version_sdk >= version_codes.MARSHMALLOW):
permissions = base_apk.GetPermissions()
self.GrantPermissions(package_name, permissions)
# Upon success, we know the device checksums, but not their paths.
if host_checksums is not None:
self._cache['package_apk_checksums'][package_name] = host_checksums
@decorators.WithTimeoutAndRetriesFromInstance()
def Uninstall(self, package_name, keep_data=False, timeout=None,
retries=None):
"""Remove the app |package_name| from the device.
This is a no-op if the app is not already installed.
Args:
package_name: The package to uninstall.
keep_data: (optional) Whether to keep the data and cache directories.
timeout: Timeout in seconds.
retries: Number of retries.
Raises:
CommandFailedError if the uninstallation fails.
CommandTimeoutError if the uninstallation times out.
DeviceUnreachableError on missing device.
"""
installed = self._GetApplicationPathsInternal(package_name)
if not installed:
return
try:
self.adb.Uninstall(package_name, keep_data)
self._cache['package_apk_paths'][package_name] = []
self._cache['package_apk_checksums'][package_name] = set()
except:
# Clear cache since we can't be sure of the state.
self._cache['package_apk_paths'].pop(package_name, 0)
self._cache['package_apk_checksums'].pop(package_name, 0)
raise
def _CheckSdkLevel(self, required_sdk_level):
"""Raises an exception if the device does not have the required SDK level.
"""
if self.build_version_sdk < required_sdk_level:
raise device_errors.DeviceVersionError(
('Requires SDK level %s, device is SDK level %s' %
(required_sdk_level, self.build_version_sdk)),
device_serial=self.serial)
@decorators.WithTimeoutAndRetriesFromInstance()
def RunShellCommand(self, cmd, shell=False, check_return=False, cwd=None,
env=None, run_as=None, as_root=False, single_line=False,
large_output=False, raw_output=False, timeout=None,
retries=None):
"""Run an ADB shell command.
The command to run |cmd| should be a sequence of program arguments
(preferred) or a single string with a shell script to run.
When |cmd| is a sequence, it is assumed to contain the name of the command
to run followed by its arguments. In this case, arguments are passed to the
command exactly as given, preventing any further processing by the shell.
This allows callers to easily pass arguments with spaces or special
characters without having to worry about quoting rules. Whenever possible,
    it is recommended to pass |cmd| as a sequence.
When |cmd| is passed as a single string, |shell| should be set to True.
The command will be interpreted and run by the shell on the device,
allowing the use of shell features such as pipes, wildcards, or variables.
Failing to set shell=True will issue a warning, but this will be changed
to a hard failure in the future (see: catapult:#3242).
This behaviour is consistent with that of command runners in cmd_helper as
well as Python's own subprocess.Popen.
TODO(perezju) Change the default of |check_return| to True when callers
have switched to the new behaviour.
Args:
cmd: A sequence containing the command to run and its arguments, or a
string with a shell script to run (should also set shell=True).
shell: A boolean indicating whether shell features may be used in |cmd|.
check_return: A boolean indicating whether or not the return code should
be checked.
cwd: The device directory in which the command should be run.
env: The environment variables with which the command should be run.
run_as: A string containing the package as which the command should be
run.
as_root: A boolean indicating whether the shell command should be run
with root privileges.
single_line: A boolean indicating if only a single line of output is
expected.
large_output: Uses a work-around for large shell command output. Without
this large output will be truncated.
raw_output: Whether to only return the raw output
(no splitting into lines).
timeout: timeout in seconds
retries: number of retries
Returns:
If single_line is False, the output of the command as a list of lines,
      otherwise, a string with the unique line of output emitted by the command
(with the optional newline at the end stripped).
Raises:
AdbCommandFailedError if check_return is True and the exit code of
the command run on the device is non-zero.
CommandFailedError if single_line is True but the output contains two or
more lines.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
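    Example (illustrative; the commands shown are placeholders):
      # Preferred: pass the command as a sequence of arguments.
      lines = device.RunShellCommand(['ls', '/sdcard'], check_return=True)
      # Shell features such as pipes require shell=True.
      out = device.RunShellCommand('dumpsys battery | grep level',
                                   shell=True, check_return=True)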
"""
def env_quote(key, value):
if not DeviceUtils._VALID_SHELL_VARIABLE.match(key):
raise KeyError('Invalid shell variable name %r' % key)
# using double quotes here to allow interpolation of shell variables
return '%s=%s' % (key, cmd_helper.DoubleQuote(value))
def run(cmd):
return self.adb.Shell(cmd)
def handle_check_return(cmd):
try:
return run(cmd)
except device_errors.AdbCommandFailedError as exc:
if check_return:
raise
else:
return exc.output
def handle_large_command(cmd):
if len(cmd) < self._MAX_ADB_COMMAND_LENGTH:
return handle_check_return(cmd)
else:
with device_temp_file.DeviceTempFile(self.adb, suffix='.sh') as script:
self._WriteFileWithPush(script.name, cmd)
logger.info('Large shell command will be run from file: %s ...',
cmd[:self._MAX_ADB_COMMAND_LENGTH])
return handle_check_return('sh %s' % script.name_quoted)
def handle_large_output(cmd, large_output_mode):
if large_output_mode:
with device_temp_file.DeviceTempFile(self.adb) as large_output_file:
cmd = '( %s )>%s 2>&1' % (cmd, large_output_file.name)
logger.debug('Large output mode enabled. Will write output to '
'device and read results from file.')
handle_large_command(cmd)
return self.ReadFile(large_output_file.name, force_pull=True)
else:
try:
return handle_large_command(cmd)
except device_errors.AdbCommandFailedError as exc:
if exc.status is None:
logger.error(_FormatPartialOutputError(exc.output))
logger.warning('Attempting to run in large_output mode.')
logger.warning('Use RunShellCommand(..., large_output=True) for '
'shell commands that expect a lot of output.')
return handle_large_output(cmd, True)
else:
raise
if isinstance(cmd, basestring):
if not shell:
logger.warning(
'The command to run should preferably be passed as a sequence of'
' args. If shell features are needed (pipes, wildcards, variables)'
' clients should explicitly set shell=True.')
else:
cmd = ' '.join(cmd_helper.SingleQuote(s) for s in cmd)
if env:
env = ' '.join(env_quote(k, v) for k, v in env.iteritems())
cmd = '%s %s' % (env, cmd)
if cwd:
cmd = 'cd %s && %s' % (cmd_helper.SingleQuote(cwd), cmd)
if run_as:
cmd = 'run-as %s sh -c %s' % (cmd_helper.SingleQuote(run_as),
cmd_helper.SingleQuote(cmd))
if as_root and self.NeedsSU():
# "su -c sh -c" allows using shell features in |cmd|
cmd = self._Su('sh -c %s' % cmd_helper.SingleQuote(cmd))
output = handle_large_output(cmd, large_output)
if raw_output:
return output
output = output.splitlines()
if single_line:
if not output:
return ''
elif len(output) == 1:
return output[0]
else:
msg = 'one line of output was expected, but got: %s'
raise device_errors.CommandFailedError(msg % output, str(self))
else:
return output
def _RunPipedShellCommand(self, script, **kwargs):
PIPESTATUS_LEADER = 'PIPESTATUS: '
script += '; echo "%s${PIPESTATUS[@]}"' % PIPESTATUS_LEADER
kwargs.update(shell=True, check_return=True)
output = self.RunShellCommand(script, **kwargs)
pipestatus_line = output[-1]
if not pipestatus_line.startswith(PIPESTATUS_LEADER):
logger.error('Pipe exit statuses of shell script missing.')
raise device_errors.AdbShellCommandFailedError(
script, output, status=None,
device_serial=self.serial)
output = output[:-1]
statuses = [
int(s) for s in pipestatus_line[len(PIPESTATUS_LEADER):].split()]
if any(statuses):
raise device_errors.AdbShellCommandFailedError(
script, output, status=statuses,
device_serial=self.serial)
return output
@decorators.WithTimeoutAndRetriesFromInstance()
def KillAll(self, process_name, exact=False, signum=device_signal.SIGKILL,
as_root=False, blocking=False, quiet=False,
timeout=None, retries=None):
"""Kill all processes with the given name on the device.
Args:
process_name: A string containing the name of the process to kill.
exact: A boolean indicating whether to kill all processes matching
the string |process_name| exactly, or all of those which contain
|process_name| as a substring. Defaults to False.
signum: An integer containing the signal number to send to kill. Defaults
to SIGKILL (9).
as_root: A boolean indicating whether the kill should be executed with
root privileges.
blocking: A boolean indicating whether we should wait until all processes
with the given |process_name| are dead.
quiet: A boolean indicating whether to ignore the fact that no processes
to kill were found.
timeout: timeout in seconds
retries: number of retries
Returns:
The number of processes attempted to kill.
Raises:
CommandFailedError if no process was killed and |quiet| is False.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
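    Example (illustrative; the process name is a placeholder):
      device.KillAll('com.example.app', blocking=True, quiet=True)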
"""
procs_pids = self.GetPids(process_name)
if exact:
procs_pids = {process_name: procs_pids.get(process_name, [])}
pids = set(itertools.chain(*procs_pids.values()))
if not pids:
if quiet:
return 0
else:
raise device_errors.CommandFailedError(
'No process "%s"' % process_name, str(self))
logger.info(
'KillAll(%r, ...) attempting to kill the following:', process_name)
for name, ids in procs_pids.iteritems():
for i in ids:
logger.info(' %05s %s', str(i), name)
cmd = ['kill', '-%d' % signum] + sorted(pids)
self.RunShellCommand(cmd, as_root=as_root, check_return=True)
def all_pids_killed():
procs_pids_remain = self.GetPids(process_name)
return not pids.intersection(itertools.chain(*procs_pids_remain.values()))
if blocking:
timeout_retry.WaitFor(all_pids_killed, wait_period=0.1)
return len(pids)
@decorators.WithTimeoutAndRetriesFromInstance()
def StartActivity(self, intent_obj, blocking=False, trace_file_name=None,
force_stop=False, timeout=None, retries=None):
"""Start package's activity on the device.
Args:
intent_obj: An Intent object to send.
blocking: A boolean indicating whether we should wait for the activity to
finish launching.
trace_file_name: If present, a string that both indicates that we want to
profile the activity and contains the path to which the
trace should be saved.
force_stop: A boolean indicating whether we should stop the activity
before starting it.
timeout: timeout in seconds
retries: number of retries
Raises:
CommandFailedError if the activity could not be started.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
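    Example (illustrative; mirrors the home-screen intent used by GoHome):
      device.StartActivity(
          intent.Intent(action='android.intent.action.MAIN',
                        category='android.intent.category.HOME'),
          blocking=True)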
"""
cmd = ['am', 'start']
if blocking:
cmd.append('-W')
if trace_file_name:
cmd.extend(['--start-profiler', trace_file_name])
if force_stop:
cmd.append('-S')
cmd.extend(intent_obj.am_args)
for line in self.RunShellCommand(cmd, check_return=True):
if line.startswith('Error:'):
raise device_errors.CommandFailedError(line, str(self))
@decorators.WithTimeoutAndRetriesFromInstance()
def StartInstrumentation(self, component, finish=True, raw=False,
extras=None, timeout=None, retries=None):
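    """Runs an instrumentation (am instrument) on the device.
    Args:
      component: A string of the form 'package/runner' identifying the
        instrumentation component to run.
      finish: A boolean indicating whether to wait for the instrumentation to
        finish before returning (adds -w to the command).
      raw: A boolean indicating whether to request raw output (adds -r).
      extras: A dict of extra key/value pairs passed to the runner via -e.
      timeout: timeout in seconds
      retries: number of retries
    Returns:
      The output of the instrumentation command as a list of lines.
    """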
if extras is None:
extras = {}
cmd = ['am', 'instrument']
if finish:
cmd.append('-w')
if raw:
cmd.append('-r')
for k, v in extras.iteritems():
cmd.extend(['-e', str(k), str(v)])
cmd.append(component)
# Store the package name in a shell variable to help the command stay under
# the _MAX_ADB_COMMAND_LENGTH limit.
package = component.split('/')[0]
shell_snippet = 'p=%s;%s' % (package,
cmd_helper.ShrinkToSnippet(cmd, 'p', package))
return self.RunShellCommand(shell_snippet, shell=True, check_return=True,
large_output=True)
@decorators.WithTimeoutAndRetriesFromInstance()
def BroadcastIntent(self, intent_obj, timeout=None, retries=None):
"""Send a broadcast intent.
Args:
      intent_obj: An Intent to broadcast.
timeout: timeout in seconds
retries: number of retries
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
cmd = ['am', 'broadcast'] + intent_obj.am_args
self.RunShellCommand(cmd, check_return=True)
@decorators.WithTimeoutAndRetriesFromInstance()
def GoHome(self, timeout=None, retries=None):
"""Return to the home screen and obtain launcher focus.
This command launches the home screen and attempts to obtain
launcher focus until the timeout is reached.
Args:
timeout: timeout in seconds
retries: number of retries
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
def is_launcher_focused():
output = self.RunShellCommand(['dumpsys', 'window', 'windows'],
check_return=True, large_output=True)
return any(self._LAUNCHER_FOCUSED_RE.match(l) for l in output)
def dismiss_popups():
# There is a dialog present; attempt to get rid of it.
# Not all dialogs can be dismissed with back.
self.SendKeyEvent(keyevent.KEYCODE_ENTER)
self.SendKeyEvent(keyevent.KEYCODE_BACK)
return is_launcher_focused()
# If Home is already focused, return early to avoid unnecessary work.
if is_launcher_focused():
return
self.StartActivity(
intent.Intent(action='android.intent.action.MAIN',
category='android.intent.category.HOME'),
blocking=True)
if not is_launcher_focused():
timeout_retry.WaitFor(dismiss_popups, wait_period=1)
@decorators.WithTimeoutAndRetriesFromInstance()
def ForceStop(self, package, timeout=None, retries=None):
"""Close the application.
Args:
package: A string containing the name of the package to stop.
timeout: timeout in seconds
retries: number of retries
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
if self.GetPids(package):
self.RunShellCommand(['am', 'force-stop', package], check_return=True)
@decorators.WithTimeoutAndRetriesFromInstance()
def ClearApplicationState(
self, package, permissions=None, timeout=None, retries=None):
"""Clear all state for the given package.
Args:
package: A string containing the name of the package to stop.
permissions: List of permissions to set after clearing data.
timeout: timeout in seconds
retries: number of retries
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
# Check that the package exists before clearing it for android builds below
# JB MR2. Necessary because calling pm clear on a package that doesn't exist
# may never return.
if ((self.build_version_sdk >= version_codes.JELLY_BEAN_MR2)
or self._GetApplicationPathsInternal(package)):
self.RunShellCommand(['pm', 'clear', package], check_return=True)
self.GrantPermissions(package, permissions)
@decorators.WithTimeoutAndRetriesFromInstance()
def SendKeyEvent(self, keycode, timeout=None, retries=None):
"""Sends a keycode to the device.
See the devil.android.sdk.keyevent module for suitable keycode values.
Args:
      keycode: An integer keycode to send to the device.
timeout: timeout in seconds
retries: number of retries
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
self.RunShellCommand(['input', 'keyevent', format(keycode, 'd')],
check_return=True)
PUSH_CHANGED_FILES_DEFAULT_TIMEOUT = 10 * _DEFAULT_TIMEOUT
@decorators.WithTimeoutAndRetriesFromInstance(
min_default_timeout=PUSH_CHANGED_FILES_DEFAULT_TIMEOUT)
def PushChangedFiles(self, host_device_tuples, timeout=None,
retries=None, delete_device_stale=False):
"""Push files to the device, skipping files that don't need updating.
When a directory is pushed, it is traversed recursively on the host and
all files in it are pushed to the device as needed.
Additionally, if delete_device_stale option is True,
files that exist on the device but don't exist on the host are deleted.
Args:
host_device_tuples: A list of (host_path, device_path) tuples, where
|host_path| is an absolute path of a file or directory on the host
        that should be minimally pushed to the device, and |device_path| is
an absolute path of the destination on the device.
timeout: timeout in seconds
retries: number of retries
delete_device_stale: option to delete stale files on device
Raises:
CommandFailedError on failure.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
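    Example (illustrative; the host and device paths are placeholders):
      device.PushChangedFiles(
          [('/host/out/test_data', '/sdcard/test_data')],
          delete_device_stale=True)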
"""
all_changed_files = []
all_stale_files = []
missing_dirs = set()
cache_commit_funcs = []
for h, d in host_device_tuples:
assert os.path.isabs(h) and posixpath.isabs(d)
h = os.path.realpath(h)
changed_files, up_to_date_files, stale_files, cache_commit_func = (
self._GetChangedAndStaleFiles(h, d, delete_device_stale))
all_changed_files += changed_files
all_stale_files += stale_files
cache_commit_funcs.append(cache_commit_func)
if changed_files and not up_to_date_files and not stale_files:
if os.path.isdir(h):
missing_dirs.add(d)
else:
missing_dirs.add(posixpath.dirname(d))
if delete_device_stale and all_stale_files:
self.RunShellCommand(['rm', '-f'] + all_stale_files, check_return=True)
if all_changed_files:
if missing_dirs:
try:
self.RunShellCommand(['mkdir', '-p'] + list(missing_dirs),
check_return=True)
except device_errors.AdbShellCommandFailedError as e:
# TODO(crbug.com/739899): This is attempting to diagnose flaky EBUSY
# errors that have been popping up in single-device scenarios.
# Remove it once we've figured out what's causing them and how best
# to handle them.
m = _EBUSY_RE.search(e.output)
if m:
logging.error(
'Hit EBUSY while attempting to make missing directories.')
logging.error('lsof output:')
for l in self._RunPipedShellCommand(
'lsof | grep %s' % cmd_helper.SingleQuote(m.group(1))):
logging.error(' %s', l)
raise
self._PushFilesImpl(host_device_tuples, all_changed_files)
for func in cache_commit_funcs:
func()
def _GetChangedAndStaleFiles(self, host_path, device_path, track_stale=False):
"""Get files to push and delete
Args:
host_path: an absolute path of a file or directory on the host
device_path: an absolute path of a file or directory on the device
track_stale: whether to bother looking for stale files (slower)
Returns:
a four-element tuple
1st element: a list of (host_files_path, device_files_path) tuples to push
2nd element: a list of host_files_path that are up-to-date
3rd element: a list of stale files under device_path, or [] when
track_stale == False
4th element: a cache commit function.
"""
try:
# Length calculations below assume no trailing /.
host_path = host_path.rstrip('/')
device_path = device_path.rstrip('/')
specific_device_paths = [device_path]
ignore_other_files = not track_stale and os.path.isdir(host_path)
if ignore_other_files:
specific_device_paths = []
for root, _, filenames in os.walk(host_path):
relative_dir = root[len(host_path) + 1:]
specific_device_paths.extend(
posixpath.join(device_path, relative_dir, f) for f in filenames)
def calculate_host_checksums():
return md5sum.CalculateHostMd5Sums([host_path])
def calculate_device_checksums():
if self._enable_device_files_cache:
cache_entry = self._cache['device_path_checksums'].get(device_path)
if cache_entry and cache_entry[0] == ignore_other_files:
return dict(cache_entry[1])
sums = md5sum.CalculateDeviceMd5Sums(specific_device_paths, self)
cache_entry = [ignore_other_files, sums]
self._cache['device_path_checksums'][device_path] = cache_entry
return dict(sums)
host_checksums, device_checksums = reraiser_thread.RunAsync((
calculate_host_checksums,
calculate_device_checksums))
except EnvironmentError as e:
logger.warning('Error calculating md5: %s', e)
return ([(host_path, device_path)], [], [], lambda: 0)
to_push = []
up_to_date = []
to_delete = []
if os.path.isfile(host_path):
host_checksum = host_checksums.get(host_path)
device_checksum = device_checksums.get(device_path)
if host_checksum == device_checksum:
up_to_date.append(host_path)
else:
to_push.append((host_path, device_path))
else:
for host_abs_path, host_checksum in host_checksums.iteritems():
device_abs_path = posixpath.join(
device_path, os.path.relpath(host_abs_path, host_path))
device_checksum = device_checksums.pop(device_abs_path, None)
if device_checksum == host_checksum:
up_to_date.append(host_abs_path)
else:
to_push.append((host_abs_path, device_abs_path))
to_delete = device_checksums.keys()
def cache_commit_func():
new_sums = {posixpath.join(device_path, path[len(host_path) + 1:]): val
for path, val in host_checksums.iteritems()}
cache_entry = [ignore_other_files, new_sums]
self._cache['device_path_checksums'][device_path] = cache_entry
return (to_push, up_to_date, to_delete, cache_commit_func)
def _ComputeDeviceChecksumsForApks(self, package_name):
ret = self._cache['package_apk_checksums'].get(package_name)
if ret is None:
device_paths = self._GetApplicationPathsInternal(package_name)
file_to_checksums = md5sum.CalculateDeviceMd5Sums(device_paths, self)
ret = set(file_to_checksums.values())
self._cache['package_apk_checksums'][package_name] = ret
return ret
def _ComputeStaleApks(self, package_name, host_apk_paths):
def calculate_host_checksums():
return md5sum.CalculateHostMd5Sums(host_apk_paths)
def calculate_device_checksums():
return self._ComputeDeviceChecksumsForApks(package_name)
host_checksums, device_checksums = reraiser_thread.RunAsync((
calculate_host_checksums, calculate_device_checksums))
stale_apks = [k for (k, v) in host_checksums.iteritems()
if v not in device_checksums]
return stale_apks, set(host_checksums.values())
def _PushFilesImpl(self, host_device_tuples, files):
if not files:
return
size = sum(host_utils.GetRecursiveDiskUsage(h) for h, _ in files)
file_count = len(files)
dir_size = sum(host_utils.GetRecursiveDiskUsage(h)
for h, _ in host_device_tuples)
dir_file_count = 0
for h, _ in host_device_tuples:
if os.path.isdir(h):
dir_file_count += sum(len(f) for _r, _d, f in os.walk(h))
else:
dir_file_count += 1
push_duration = self._ApproximateDuration(
file_count, file_count, size, False)
dir_push_duration = self._ApproximateDuration(
len(host_device_tuples), dir_file_count, dir_size, False)
zip_duration = self._ApproximateDuration(1, 1, size, True)
if (dir_push_duration < push_duration and dir_push_duration < zip_duration
# TODO(jbudorick): Resume directory pushing once clients have switched
# to 1.0.36-compatible syntax.
and False):
self._PushChangedFilesIndividually(host_device_tuples)
elif push_duration < zip_duration:
self._PushChangedFilesIndividually(files)
elif self._commands_installed is False:
# Already tried and failed to install unzip command.
self._PushChangedFilesIndividually(files)
elif not self._PushChangedFilesZipped(
files, [d for _, d in host_device_tuples]):
self._PushChangedFilesIndividually(files)
def _MaybeInstallCommands(self):
if self._commands_installed is None:
try:
if not install_commands.Installed(self):
install_commands.InstallCommands(self)
self._commands_installed = True
except device_errors.CommandFailedError as e:
logger.warning('unzip not available: %s', str(e))
self._commands_installed = False
return self._commands_installed
@staticmethod
def _ApproximateDuration(adb_calls, file_count, byte_count, is_zipping):
# We approximate the time to push a set of files to a device as:
# t = c1 * a + c2 * f + c3 + b / c4 + b / (c5 * c6), where
# t: total time (sec)
# c1: adb call time delay (sec)
# a: number of times adb is called (unitless)
# c2: push time delay (sec)
# f: number of files pushed via adb (unitless)
# c3: zip time delay (sec)
# c4: zip rate (bytes/sec)
# b: total number of bytes (bytes)
# c5: transfer rate (bytes/sec)
# c6: compression ratio (unitless)
# All of these are approximations.
ADB_CALL_PENALTY = 0.1 # seconds
ADB_PUSH_PENALTY = 0.01 # seconds
ZIP_PENALTY = 2.0 # seconds
ZIP_RATE = 10000000.0 # bytes / second
TRANSFER_RATE = 2000000.0 # bytes / second
COMPRESSION_RATIO = 2.0 # unitless
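    # Worked example (illustrative): pushing 10 files totalling 4 MB
    # individually costs roughly 0.1*10 + 0.01*10 + 4e6/2e6 = 3.1 s, while
    # zipping first costs roughly 0.1 + 0.01 + (2.0 + 4e6/1e7) + 4e6/(2e6*2.0)
    # = 3.51 s, so the individual push would be chosen in that case.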
adb_call_time = ADB_CALL_PENALTY * adb_calls
adb_push_setup_time = ADB_PUSH_PENALTY * file_count
if is_zipping:
zip_time = ZIP_PENALTY + byte_count / ZIP_RATE
transfer_time = byte_count / (TRANSFER_RATE * COMPRESSION_RATIO)
else:
zip_time = 0
transfer_time = byte_count / TRANSFER_RATE
return adb_call_time + adb_push_setup_time + zip_time + transfer_time
def _PushChangedFilesIndividually(self, files):
for h, d in files:
self.adb.Push(h, d)
def _PushChangedFilesZipped(self, files, dirs):
with tempfile.NamedTemporaryFile(suffix='.zip') as zip_file:
zip_proc = multiprocessing.Process(
target=DeviceUtils._CreateDeviceZip,
args=(zip_file.name, files))
zip_proc.start()
try:
# While it's zipping, ensure the unzip command exists on the device.
if not self._MaybeInstallCommands():
zip_proc.terminate()
return False
# Warm up NeedsSU cache while we're still zipping.
self.NeedsSU()
with device_temp_file.DeviceTempFile(
self.adb, suffix='.zip') as device_temp:
zip_proc.join()
self.adb.Push(zip_file.name, device_temp.name)
quoted_dirs = ' '.join(cmd_helper.SingleQuote(d) for d in dirs)
self.RunShellCommand(
'unzip %s&&chmod -R 777 %s' % (device_temp.name, quoted_dirs),
shell=True, as_root=True,
env={'PATH': '%s:$PATH' % install_commands.BIN_DIR},
check_return=True)
finally:
if zip_proc.is_alive():
zip_proc.terminate()
return True
@staticmethod
def _CreateDeviceZip(zip_path, host_device_tuples):
with zipfile.ZipFile(zip_path, 'w') as zip_file:
for host_path, device_path in host_device_tuples:
zip_utils.WriteToZipFile(zip_file, host_path, device_path)
# TODO(nednguyen): remove this and migrate the callsite to PathExists().
@decorators.WithTimeoutAndRetriesFromInstance()
def FileExists(self, device_path, timeout=None, retries=None):
"""Checks whether the given file exists on the device.
Arguments are the same as PathExists.
"""
return self.PathExists(device_path, timeout=timeout, retries=retries)
@decorators.WithTimeoutAndRetriesFromInstance()
def PathExists(self, device_paths, as_root=False, timeout=None, retries=None):
"""Checks whether the given path(s) exists on the device.
Args:
      device_paths: A string containing the absolute path to the file on the
device, or an iterable of paths to check.
      as_root: Whether root permissions should be used to check for the existence
of the given path(s).
timeout: timeout in seconds
retries: number of retries
Returns:
      True if all the given paths exist on the device, False otherwise.
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
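    Example (illustrative; the paths are placeholders):
      if device.PathExists(['/sdcard/config.json', '/sdcard/data.db']):
        ...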
"""
paths = device_paths
if isinstance(paths, basestring):
paths = (paths,)
if not paths:
return True
cmd = ['test', '-e', paths[0]]
for p in paths[1:]:
cmd.extend(['-a', '-e', p])
try:
self.RunShellCommand(cmd, as_root=as_root, check_return=True,
timeout=timeout, retries=retries)
return True
except device_errors.CommandFailedError:
return False
@decorators.WithTimeoutAndRetriesFromInstance()
def RemovePath(self, device_path, force=False, recursive=False,
as_root=False, rename=False, timeout=None, retries=None):
"""Removes the given path(s) from the device.
Args:
device_path: A string containing the absolute path to the file on the
device, or an iterable of paths to check.
force: Whether to remove the path(s) with force (-f).
recursive: Whether to remove any directories in the path(s) recursively.
      as_root: Whether root permissions should be used to remove the given
path(s).
rename: Whether to rename the path(s) before removing to help avoid
filesystem errors. See https://stackoverflow.com/questions/11539657
timeout: timeout in seconds
retries: number of retries
"""
def _RenamePath(path):
random_suffix = hex(random.randint(2 ** 12, 2 ** 16 - 1))[2:]
dest = '%s-%s' % (path, random_suffix)
try:
self.RunShellCommand(
['mv', path, dest], as_root=as_root, check_return=True)
return dest
except device_errors.AdbShellCommandFailedError:
# If it couldn't be moved, just try rm'ing the original path instead.
return path
args = ['rm']
if force:
args.append('-f')
if recursive:
args.append('-r')
if isinstance(device_path, basestring):
args.append(device_path if not rename else _RenamePath(device_path))
else:
args.extend(
device_path if not rename else [_RenamePath(p) for p in device_path])
self.RunShellCommand(args, as_root=as_root, check_return=True)
@decorators.WithTimeoutAndRetriesFromInstance()
def PullFile(self, device_path, host_path, timeout=None, retries=None):
"""Pull a file from the device.
Args:
device_path: A string containing the absolute path of the file to pull
from the device.
host_path: A string containing the absolute path of the destination on
the host.
timeout: timeout in seconds
retries: number of retries
Raises:
CommandFailedError on failure.
CommandTimeoutError on timeout.
"""
# Create the base dir if it doesn't exist already
dirname = os.path.dirname(host_path)
if dirname and not os.path.exists(dirname):
os.makedirs(dirname)
self.adb.Pull(device_path, host_path)
def _ReadFileWithPull(self, device_path):
try:
d = tempfile.mkdtemp()
host_temp_path = os.path.join(d, 'tmp_ReadFileWithPull')
self.adb.Pull(device_path, host_temp_path)
with open(host_temp_path, 'r') as host_temp:
return host_temp.read()
finally:
if os.path.exists(d):
shutil.rmtree(d)
@decorators.WithTimeoutAndRetriesFromInstance()
def ReadFile(self, device_path, as_root=False, force_pull=False,
timeout=None, retries=None):
"""Reads the contents of a file from the device.
Args:
device_path: A string containing the absolute path of the file to read
from the device.
as_root: A boolean indicating whether the read should be executed with
root privileges.
force_pull: A boolean indicating whether to force the operation to be
performed by pulling a file from the device. The default is, when the
contents are short, to retrieve the contents using cat instead.
timeout: timeout in seconds
retries: number of retries
Returns:
      The contents of |device_path| as a string. Contents are interpreted using
universal newlines, so the caller will see them encoded as '\n'. Also,
all lines will be terminated.
Raises:
AdbCommandFailedError if the file can't be read.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
def get_size(path):
return self.FileSize(path, as_root=as_root)
if (not force_pull
and 0 < get_size(device_path) <= self._MAX_ADB_OUTPUT_LENGTH):
return _JoinLines(self.RunShellCommand(
['cat', device_path], as_root=as_root, check_return=True))
elif as_root and self.NeedsSU():
with device_temp_file.DeviceTempFile(self.adb) as device_temp:
cmd = 'SRC=%s DEST=%s;cp "$SRC" "$DEST" && chmod 666 "$DEST"' % (
cmd_helper.SingleQuote(device_path),
cmd_helper.SingleQuote(device_temp.name))
self.RunShellCommand(cmd, shell=True, as_root=True, check_return=True)
return self._ReadFileWithPull(device_temp.name)
else:
return self._ReadFileWithPull(device_path)
def _WriteFileWithPush(self, device_path, contents):
with tempfile.NamedTemporaryFile() as host_temp:
host_temp.write(contents)
host_temp.flush()
self.adb.Push(host_temp.name, device_path)
@decorators.WithTimeoutAndRetriesFromInstance()
def WriteFile(self, device_path, contents, as_root=False, force_push=False,
timeout=None, retries=None):
"""Writes |contents| to a file on the device.
Args:
device_path: A string containing the absolute path to the file to write
on the device.
contents: A string containing the data to write to the device.
as_root: A boolean indicating whether the write should be executed with
root privileges (if available).
force_push: A boolean indicating whether to force the operation to be
performed by pushing a file to the device. The default is, when the
contents are short, to pass the contents using a shell script instead.
timeout: timeout in seconds
retries: number of retries
Raises:
CommandFailedError if the file could not be written on the device.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
if not force_push and len(contents) < self._MAX_ADB_COMMAND_LENGTH:
      # If the contents are small, for efficiency we write the contents with
# a shell command rather than pushing a file.
cmd = 'echo -n %s > %s' % (cmd_helper.SingleQuote(contents),
cmd_helper.SingleQuote(device_path))
self.RunShellCommand(cmd, shell=True, as_root=as_root, check_return=True)
elif as_root and self.NeedsSU():
# Adb does not allow to "push with su", so we first push to a temp file
# on a safe location, and then copy it to the desired location with su.
with device_temp_file.DeviceTempFile(self.adb) as device_temp:
self._WriteFileWithPush(device_temp.name, contents)
# Here we need 'cp' rather than 'mv' because the temp and
# destination files might be on different file systems (e.g.
# on internal storage and an external sd card).
self.RunShellCommand(['cp', device_temp.name, device_path],
as_root=True, check_return=True)
else:
# If root is not needed, we can push directly to the desired location.
self._WriteFileWithPush(device_path, contents)
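  # Illustrative sketch (not part of the original module): a WriteFile /
  # ReadFile round trip as a caller might use it. The serial number and the
  # on-device path below are made-up placeholder values.
  #
  #   device = DeviceUtils('0123456789abcdef')
  #   device.WriteFile('/data/local/tmp/example.txt', 'hello\n')
  #   assert device.ReadFile('/data/local/tmp/example.txt') == 'hello\n'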
def _ParseLongLsOutput(self, device_path, as_root=False, **kwargs):
"""Run and scrape the output of 'ls -a -l' on a device directory."""
device_path = posixpath.join(device_path, '') # Force trailing '/'.
output = self.RunShellCommand(
['ls', '-a', '-l', device_path], as_root=as_root,
check_return=True, env={'TZ': 'utc'}, **kwargs)
if output and output[0].startswith('total '):
output.pop(0) # pylint: disable=maybe-no-member
entries = []
for line in output:
m = _LONG_LS_OUTPUT_RE.match(line)
if m:
if m.group('filename') not in ['.', '..']:
entries.append(m.groupdict())
else:
logger.info('Skipping: %s', line)
return entries
def ListDirectory(self, device_path, as_root=False, **kwargs):
"""List all files on a device directory.
Mirroring os.listdir (and most client expectations) the resulting list
does not include the special entries '.' and '..' even if they are present
in the directory.
Args:
device_path: A string containing the path of the directory on the device
to list.
      as_root: A boolean indicating whether to use root privileges to list
the directory contents.
timeout: timeout in seconds
retries: number of retries
Returns:
A list of filenames for all entries contained in the directory.
Raises:
AdbCommandFailedError if |device_path| does not specify a valid and
accessible directory in the device.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
entries = self._ParseLongLsOutput(device_path, as_root=as_root, **kwargs)
return [d['filename'] for d in entries]
def StatDirectory(self, device_path, as_root=False, **kwargs):
"""List file and stat info for all entries on a device directory.
Implementation notes: this is currently implemented by parsing the output
    of 'ls -a -l' on the device. Where possible and convenient, we attempt to
make parsing strict and return values mirroring those of the standard |os|
and |stat| Python modules.
Mirroring os.listdir (and most client expectations) the resulting list
does not include the special entries '.' and '..' even if they are present
in the directory.
Args:
device_path: A string containing the path of the directory on the device
to list.
      as_root: A boolean indicating whether to use root privileges to list
the directory contents.
timeout: timeout in seconds
retries: number of retries
Returns:
A list of dictionaries, each containing the following keys:
filename: A string with the file name.
st_mode: File permissions, use the stat module to interpret these.
st_nlink: Number of hard links (may be missing).
st_owner: A string with the user name of the owner.
st_group: A string with the group name of the owner.
      st_rdev_pair: Device type as (major, minor) (only if inode device).
st_size: Size of file, in bytes (may be missing for non-regular files).
st_mtime: Time of most recent modification, in seconds since epoch
(although resolution is in minutes).
symbolic_link_to: If entry is a symbolic link, path where it points to;
missing otherwise.
Raises:
AdbCommandFailedError if |device_path| does not specify a valid and
accessible directory in the device.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
entries = self._ParseLongLsOutput(device_path, as_root=as_root, **kwargs)
for d in entries:
for key, value in d.items():
if value is None:
del d[key] # Remove missing fields.
d['st_mode'] = _ParseModeString(d['st_mode'])
d['st_mtime'] = calendar.timegm(
time.strptime(d['st_mtime'], _LS_DATE_FORMAT))
for key in ['st_nlink', 'st_size', 'st_rdev_major', 'st_rdev_minor']:
if key in d:
d[key] = int(d[key])
if 'st_rdev_major' in d and 'st_rdev_minor' in d:
d['st_rdev_pair'] = (d.pop('st_rdev_major'), d.pop('st_rdev_minor'))
return entries
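  # Illustrative sketch (not part of the original module): the shape of a
  # single entry returned by StatDirectory for a regular file. The concrete
  # values are made up.
  #
  #   {'filename': 'example.txt', 'st_mode': 33188, 'st_nlink': 1,
  #    'st_owner': 'root', 'st_group': 'root', 'st_size': 12,
  #    'st_mtime': 1416000000}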
def StatPath(self, device_path, as_root=False, **kwargs):
"""Get the stat attributes of a file or directory on the device.
Args:
device_path: A string containing the path of a file or directory from
which to get attributes.
      as_root: A boolean indicating whether to use root privileges to
access the file information.
timeout: timeout in seconds
retries: number of retries
Returns:
A dictionary with the stat info collected; see StatDirectory for details.
Raises:
CommandFailedError if device_path cannot be found on the device.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
dirname, filename = posixpath.split(posixpath.normpath(device_path))
for entry in self.StatDirectory(dirname, as_root=as_root, **kwargs):
if entry['filename'] == filename:
return entry
raise device_errors.CommandFailedError(
'Cannot find file or directory: %r' % device_path, str(self))
def FileSize(self, device_path, as_root=False, **kwargs):
"""Get the size of a file on the device.
Note: This is implemented by parsing the output of the 'ls' command on
the device. On some Android versions, when passing a directory or special
file, the size is *not* reported and this function will throw an exception.
Args:
device_path: A string containing the path of a file on the device.
      as_root: A boolean indicating whether to use root privileges to
access the file information.
timeout: timeout in seconds
retries: number of retries
Returns:
The size of the file in bytes.
Raises:
CommandFailedError if device_path cannot be found on the device, or
        its size cannot be determined for some reason.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
entry = self.StatPath(device_path, as_root=as_root, **kwargs)
try:
return entry['st_size']
except KeyError:
raise device_errors.CommandFailedError(
'Could not determine the size of: %s' % device_path, str(self))
@decorators.WithTimeoutAndRetriesFromInstance()
def SetJavaAsserts(self, enabled, timeout=None, retries=None):
"""Enables or disables Java asserts.
Args:
enabled: A boolean indicating whether Java asserts should be enabled
or disabled.
timeout: timeout in seconds
retries: number of retries
Returns:
True if the device-side property changed and a restart is required as a
result, False otherwise.
Raises:
CommandTimeoutError on timeout.
"""
def find_property(lines, property_name):
for index, line in enumerate(lines):
if line.strip() == '':
continue
key_value = tuple(s.strip() for s in line.split('=', 1))
if len(key_value) != 2:
continue
key, value = key_value
if key == property_name:
return index, value
return None, ''
new_value = 'all' if enabled else ''
# First ensure the desired property is persisted.
try:
properties = self.ReadFile(self.LOCAL_PROPERTIES_PATH).splitlines()
except device_errors.CommandFailedError:
properties = []
index, value = find_property(properties, self.JAVA_ASSERT_PROPERTY)
if new_value != value:
if new_value:
new_line = '%s=%s' % (self.JAVA_ASSERT_PROPERTY, new_value)
if index is None:
properties.append(new_line)
else:
properties[index] = new_line
else:
assert index is not None # since new_value == '' and new_value != value
properties.pop(index)
self.WriteFile(self.LOCAL_PROPERTIES_PATH, _JoinLines(properties))
# Next, check the current runtime value is what we need, and
# if not, set it and report that a reboot is required.
value = self.GetProp(self.JAVA_ASSERT_PROPERTY)
if new_value != value:
self.SetProp(self.JAVA_ASSERT_PROPERTY, new_value)
return True
else:
return False
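  # Illustrative sketch (not part of the original module): typical caller
  # pattern for SetJavaAsserts. Reboot() is assumed to be available on this
  # class; the restart is only needed when the method reports that the
  # runtime property actually changed.
  #
  #   if device.SetJavaAsserts(True):
  #     device.Reboot()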
def GetLanguage(self, cache=False):
"""Returns the language setting on the device.
Args:
cache: Whether to use cached properties when available.
"""
return self.GetProp('persist.sys.language', cache=cache)
def GetCountry(self, cache=False):
"""Returns the country setting on the device.
Args:
cache: Whether to use cached properties when available.
"""
return self.GetProp('persist.sys.country', cache=cache)
@property
def screen_density(self):
"""Returns the screen density of the device."""
DPI_TO_DENSITY = {
120: 'ldpi',
160: 'mdpi',
240: 'hdpi',
320: 'xhdpi',
480: 'xxhdpi',
640: 'xxxhdpi',
}
return DPI_TO_DENSITY.get(self.pixel_density, 'tvdpi')
@property
def pixel_density(self):
return int(self.GetProp('ro.sf.lcd_density', cache=True))
@property
def build_description(self):
"""Returns the build description of the system.
For example:
nakasi-user 4.4.4 KTU84P 1227136 release-keys
"""
return self.GetProp('ro.build.description', cache=True)
@property
def build_fingerprint(self):
"""Returns the build fingerprint of the system.
For example:
google/nakasi/grouper:4.4.4/KTU84P/1227136:user/release-keys
"""
return self.GetProp('ro.build.fingerprint', cache=True)
@property
def build_id(self):
"""Returns the build ID of the system (e.g. 'KTU84P')."""
return self.GetProp('ro.build.id', cache=True)
@property
def build_product(self):
"""Returns the build product of the system (e.g. 'grouper')."""
return self.GetProp('ro.build.product', cache=True)
@property
def build_type(self):
"""Returns the build type of the system (e.g. 'user')."""
return self.GetProp('ro.build.type', cache=True)
@property
def build_version_sdk(self):
"""Returns the build version sdk of the system as a number (e.g. 19).
For version code numbers see:
http://developer.android.com/reference/android/os/Build.VERSION_CODES.html
For named constants see devil.android.sdk.version_codes
Raises:
CommandFailedError if the build version sdk is not a number.
"""
value = self.GetProp('ro.build.version.sdk', cache=True)
try:
return int(value)
except ValueError:
raise device_errors.CommandFailedError(
'Invalid build version sdk: %r' % value)
@property
def product_cpu_abi(self):
"""Returns the product cpu abi of the device (e.g. 'armeabi-v7a')."""
return self.GetProp('ro.product.cpu.abi', cache=True)
@property
def product_model(self):
"""Returns the name of the product model (e.g. 'Nexus 7')."""
return self.GetProp('ro.product.model', cache=True)
@property
def product_name(self):
"""Returns the product name of the device (e.g. 'nakasi')."""
return self.GetProp('ro.product.name', cache=True)
@property
def product_board(self):
"""Returns the product board name of the device (e.g. 'shamu')."""
return self.GetProp('ro.product.board', cache=True)
def _EnsureCacheInitialized(self):
"""Populates cache token, runs getprop and fetches $EXTERNAL_STORAGE."""
if self._cache['token']:
return
with self._cache_lock:
if self._cache['token']:
return
# Change the token every time to ensure that it will match only the
# previously dumped cache.
token = str(uuid.uuid1())
cmd = (
'c=/data/local/tmp/cache_token;'
'echo $EXTERNAL_STORAGE;'
'cat $c 2>/dev/null||echo;'
'echo "%s">$c &&' % token +
'getprop'
)
output = self.RunShellCommand(
cmd, shell=True, check_return=True, large_output=True)
      # Error-checking for this path existing is done in GetExternalStoragePath().
self._cache['external_storage'] = output[0]
self._cache['prev_token'] = output[1]
output = output[2:]
prop_cache = self._cache['getprop']
prop_cache.clear()
for key, value in _GETPROP_RE.findall(''.join(output)):
prop_cache[key] = value
self._cache['token'] = token
@decorators.WithTimeoutAndRetriesFromInstance()
def GetProp(self, property_name, cache=False, timeout=None, retries=None):
"""Gets a property from the device.
Args:
property_name: A string containing the name of the property to get from
the device.
cache: Whether to use cached properties when available.
timeout: timeout in seconds
retries: number of retries
Returns:
The value of the device's |property_name| property.
Raises:
CommandTimeoutError on timeout.
"""
assert isinstance(property_name, basestring), (
"property_name is not a string: %r" % property_name)
if cache:
# It takes ~120ms to query a single property, and ~130ms to query all
# properties. So, when caching we always query all properties.
self._EnsureCacheInitialized()
else:
# timeout and retries are handled down at run shell, because we don't
# want to apply them in the other branch when reading from the cache
value = self.RunShellCommand(
['getprop', property_name], single_line=True, check_return=True,
timeout=timeout, retries=retries)
self._cache['getprop'][property_name] = value
# Non-existent properties are treated as empty strings by getprop.
return self._cache['getprop'].get(property_name, '')
@decorators.WithTimeoutAndRetriesFromInstance()
def SetProp(self, property_name, value, check=False, timeout=None,
retries=None):
"""Sets a property on the device.
Args:
property_name: A string containing the name of the property to set on
the device.
value: A string containing the value to set to the property on the
device.
check: A boolean indicating whether to check that the property was
successfully set on the device.
timeout: timeout in seconds
retries: number of retries
Raises:
CommandFailedError if check is true and the property was not correctly
set on the device (e.g. because it is not rooted).
CommandTimeoutError on timeout.
"""
assert isinstance(property_name, basestring), (
"property_name is not a string: %r" % property_name)
assert isinstance(value, basestring), "value is not a string: %r" % value
self.RunShellCommand(['setprop', property_name, value], check_return=True)
prop_cache = self._cache['getprop']
if property_name in prop_cache:
del prop_cache[property_name]
# TODO(perezju) remove the option and make the check mandatory, but using a
# single shell script to both set- and getprop.
if check and value != self.GetProp(property_name, cache=False):
raise device_errors.CommandFailedError(
'Unable to set property %r on the device to %r'
% (property_name, value), str(self))
@decorators.WithTimeoutAndRetriesFromInstance()
def GetABI(self, timeout=None, retries=None):
"""Gets the device main ABI.
Args:
timeout: timeout in seconds
retries: number of retries
Returns:
The device's main ABI name.
Raises:
CommandTimeoutError on timeout.
"""
return self.GetProp('ro.product.cpu.abi', cache=True)
@decorators.WithTimeoutAndRetriesFromInstance()
def GetPids(self, process_name=None, timeout=None, retries=None):
"""Returns the PIDs of processes containing the given name as substring.
Note that the |process_name| is often the package name.
Args:
process_name: A string containing the process name to get the PIDs for.
If missing returns PIDs for all processes.
timeout: timeout in seconds
retries: number of retries
Returns:
A dict mapping process name to a list of PIDs for each process that
contained the provided |process_name|.
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
procs_pids = collections.defaultdict(list)
try:
ps_cmd = 'ps'
# ps behavior was changed in Android above N, http://crbug.com/686716
if (self.build_version_sdk >= version_codes.NOUGAT_MR1
and self.build_id[0] > 'N'):
ps_cmd = 'ps -e'
if process_name:
ps_output = self._RunPipedShellCommand(
'%s | grep -F %s' % (ps_cmd, cmd_helper.SingleQuote(process_name)))
else:
ps_output = self.RunShellCommand(
ps_cmd.split(), check_return=True, large_output=True)
except device_errors.AdbShellCommandFailedError as e:
if e.status and isinstance(e.status, list) and not e.status[0]:
# If ps succeeded but grep failed, there were no processes with the
# given name.
return procs_pids
else:
raise
process_name = process_name or ''
for line in ps_output:
try:
ps_data = line.split()
pid, process = ps_data[1], ps_data[-1]
if process_name in process and pid != 'PID':
procs_pids[process].append(pid)
except IndexError:
pass
return procs_pids
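  # Illustrative sketch (not part of the original module): example return
  # value of GetPids('com.example.app') on a device with two matching
  # processes. Package names and PIDs are made up.
  #
  #   {'com.example.app': ['1234'],
  #    'com.example.app:sandboxed_process0': ['1256']}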
def GetApplicationPids(self, process_name, at_most_one=False, **kwargs):
"""Returns the PID or PIDs of a given process name.
Note that the |process_name|, often the package name, must match exactly.
Args:
process_name: A string containing the process name to get the PIDs for.
at_most_one: A boolean indicating that at most one PID is expected to
be found.
timeout: timeout in seconds
retries: number of retries
Returns:
A list of the PIDs for the named process. If at_most_one=True returns
the single PID found or None otherwise.
Raises:
CommandFailedError if at_most_one=True and more than one PID is found
for the named process.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
pids = self.GetPids(process_name, **kwargs).get(process_name, [])
if at_most_one:
if len(pids) > 1:
raise device_errors.CommandFailedError(
'Expected a single process but found PIDs: %s.' % ', '.join(pids),
device_serial=str(self))
return pids[0] if pids else None
else:
return pids
@decorators.WithTimeoutAndRetriesFromInstance()
def GetEnforce(self, timeout=None, retries=None):
"""Get the current mode of SELinux.
Args:
timeout: timeout in seconds
retries: number of retries
Returns:
True (enforcing), False (permissive), or None (disabled).
Raises:
CommandFailedError on failure.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
output = self.RunShellCommand(
['getenforce'], check_return=True, single_line=True).lower()
if output not in _SELINUX_MODE:
raise device_errors.CommandFailedError(
'Unexpected getenforce output: %s' % output)
return _SELINUX_MODE[output]
@decorators.WithTimeoutAndRetriesFromInstance()
def SetEnforce(self, enabled, timeout=None, retries=None):
"""Modify the mode SELinux is running in.
Args:
      enabled: a boolean indicating whether to put SELinux in enforcing mode
(if True), or permissive mode (otherwise).
timeout: timeout in seconds
retries: number of retries
Raises:
CommandFailedError on failure.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
self.RunShellCommand(
['setenforce', '1' if int(enabled) else '0'], as_root=True,
check_return=True)
@decorators.WithTimeoutAndRetriesFromInstance()
def TakeScreenshot(self, host_path=None, timeout=None, retries=None):
"""Takes a screenshot of the device.
Args:
host_path: A string containing the path on the host to save the
screenshot to. If None, a file name in the current
directory will be generated.
timeout: timeout in seconds
retries: number of retries
Returns:
The name of the file on the host to which the screenshot was saved.
Raises:
CommandFailedError on failure.
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
if not host_path:
host_path = os.path.abspath('screenshot-%s-%s.png' % (
self.serial, _GetTimeStamp()))
with device_temp_file.DeviceTempFile(self.adb, suffix='.png') as device_tmp:
self.RunShellCommand(['/system/bin/screencap', '-p', device_tmp.name],
check_return=True)
self.PullFile(device_tmp.name, host_path)
return host_path
@decorators.WithTimeoutAndRetriesFromInstance()
def GetMemoryUsageForPid(self, pid, timeout=None, retries=None):
"""Gets the memory usage for the given PID.
Args:
pid: PID of the process.
timeout: timeout in seconds
retries: number of retries
Returns:
A dict containing memory usage statistics for the PID. May include:
Size, Rss, Pss, Shared_Clean, Shared_Dirty, Private_Clean,
Private_Dirty, VmHWM
Raises:
CommandTimeoutError on timeout.
"""
result = collections.defaultdict(int)
try:
result.update(self._GetMemoryUsageForPidFromSmaps(pid))
except device_errors.CommandFailedError:
logger.exception('Error getting memory usage from smaps')
try:
result.update(self._GetMemoryUsageForPidFromStatus(pid))
except device_errors.CommandFailedError:
logger.exception('Error getting memory usage from status')
return result
@decorators.WithTimeoutAndRetriesFromInstance()
def DismissCrashDialogIfNeeded(self, timeout=None, retries=None):
"""Dismiss the error/ANR dialog if present.
Returns: Name of the crashed package if a dialog is focused,
None otherwise.
"""
def _FindFocusedWindow():
match = None
# TODO(jbudorick): Try to grep the output on the device instead of using
# large_output if/when DeviceUtils exposes a public interface for piped
# shell command handling.
for line in self.RunShellCommand(['dumpsys', 'window', 'windows'],
check_return=True, large_output=True):
match = re.match(_CURRENT_FOCUS_CRASH_RE, line)
if match:
break
return match
match = _FindFocusedWindow()
if not match:
return None
package = match.group(2)
logger.warning('Trying to dismiss %s dialog for %s', *match.groups())
self.SendKeyEvent(keyevent.KEYCODE_DPAD_RIGHT)
self.SendKeyEvent(keyevent.KEYCODE_DPAD_RIGHT)
self.SendKeyEvent(keyevent.KEYCODE_ENTER)
match = _FindFocusedWindow()
if match:
logger.error('Still showing a %s dialog for %s', *match.groups())
return package
def _GetMemoryUsageForPidFromSmaps(self, pid):
SMAPS_COLUMNS = (
'Size', 'Rss', 'Pss', 'Shared_Clean', 'Shared_Dirty', 'Private_Clean',
'Private_Dirty')
showmap_out = self._RunPipedShellCommand(
'showmap %d | grep TOTAL' % int(pid), as_root=True)
split_totals = showmap_out[-1].split()
if (not split_totals
or len(split_totals) != 9
or split_totals[-1] != 'TOTAL'):
raise device_errors.CommandFailedError(
'Invalid output from showmap: %s' % '\n'.join(showmap_out))
return dict(itertools.izip(SMAPS_COLUMNS, (int(n) for n in split_totals)))
def _GetMemoryUsageForPidFromStatus(self, pid):
for line in self.ReadFile(
'/proc/%s/status' % str(pid), as_root=True).splitlines():
if line.startswith('VmHWM:'):
return {'VmHWM': int(line.split()[1])}
raise device_errors.CommandFailedError(
        'Could not find memory peak value for pid %s' % str(pid))
def GetLogcatMonitor(self, *args, **kwargs):
"""Returns a new LogcatMonitor associated with this device.
Parameters passed to this function are passed directly to
|logcat_monitor.LogcatMonitor| and are documented there.
"""
return logcat_monitor.LogcatMonitor(self.adb, *args, **kwargs)
def GetClientCache(self, client_name):
"""Returns client cache."""
if client_name not in self._client_caches:
self._client_caches[client_name] = {}
return self._client_caches[client_name]
def _ClearCache(self):
"""Clears all caches."""
for client in self._client_caches:
self._client_caches[client].clear()
self._cache = {
# Map of packageId -> list of on-device .apk paths
'package_apk_paths': {},
# Set of packageId that were loaded from LoadCacheData and not yet
# verified.
'package_apk_paths_to_verify': set(),
# Map of packageId -> set of on-device .apk checksums
'package_apk_checksums': {},
# Map of property_name -> value
'getprop': {},
# Map of device_path -> [ignore_other_files, map of path->checksum]
'device_path_checksums': {},
# Location of sdcard ($EXTERNAL_STORAGE).
'external_storage': None,
# Token used to detect when LoadCacheData is stale.
'token': None,
'prev_token': None,
}
@decorators.WithTimeoutAndRetriesFromInstance()
def LoadCacheData(self, data, timeout=None, retries=None):
"""Initializes the cache from data created using DumpCacheData.
The cache is used only if its token matches the one found on the device.
This prevents a stale cache from being used (which can happen when sharing
devices).
Args:
data: A previously serialized cache (string).
timeout: timeout in seconds
retries: number of retries
Returns:
Whether the cache was loaded.
"""
obj = json.loads(data)
self._EnsureCacheInitialized()
given_token = obj.get('token')
if not given_token or self._cache['prev_token'] != given_token:
logger.warning('Stale cache detected. Not using it.')
return False
self._cache['package_apk_paths'] = obj.get('package_apk_paths', {})
    # When using a cache across script invocations, verify that apps have
# not been uninstalled.
self._cache['package_apk_paths_to_verify'] = set(
self._cache['package_apk_paths'].iterkeys())
package_apk_checksums = obj.get('package_apk_checksums', {})
for k, v in package_apk_checksums.iteritems():
package_apk_checksums[k] = set(v)
self._cache['package_apk_checksums'] = package_apk_checksums
device_path_checksums = obj.get('device_path_checksums', {})
self._cache['device_path_checksums'] = device_path_checksums
return True
@decorators.WithTimeoutAndRetriesFromInstance()
def DumpCacheData(self, timeout=None, retries=None):
"""Dumps the current cache state to a string.
Args:
timeout: timeout in seconds
retries: number of retries
Returns:
A serialized cache as a string.
"""
self._EnsureCacheInitialized()
obj = {}
obj['token'] = self._cache['token']
obj['package_apk_paths'] = self._cache['package_apk_paths']
obj['package_apk_checksums'] = self._cache['package_apk_checksums']
# JSON can't handle sets.
for k, v in obj['package_apk_checksums'].iteritems():
obj['package_apk_checksums'][k] = list(v)
obj['device_path_checksums'] = self._cache['device_path_checksums']
return json.dumps(obj, separators=(',', ':'))
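  # Illustrative sketch (not part of the original module): persisting the
  # cache between script invocations with DumpCacheData / LoadCacheData. The
  # cache file path is a made-up placeholder; LoadCacheData simply returns
  # False when the on-device token no longer matches the dumped cache.
  #
  #   cache_path = '/tmp/device_cache_%s.json' % device.serial
  #   if os.path.exists(cache_path):
  #     with open(cache_path) as f:
  #       device.LoadCacheData(f.read())
  #   ...  # run the actual work
  #   with open(cache_path, 'w') as f:
  #     f.write(device.DumpCacheData())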
@classmethod
def parallel(cls, devices, async=False):
"""Creates a Parallelizer to operate over the provided list of devices.
Args:
devices: A list of either DeviceUtils instances or objects from
        which DeviceUtils instances can be constructed. If None,
all attached devices will be used.
async: If true, returns a Parallelizer that runs operations
asynchronously.
Returns:
A Parallelizer operating over |devices|.
"""
devices = [d if isinstance(d, cls) else cls(d) for d in devices]
if async:
return parallelizer.Parallelizer(devices)
else:
return parallelizer.SyncParallelizer(devices)
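  # Illustrative sketch (not part of the original module): running a shell
  # command on several devices at once. pGet() is assumed here to be the
  # Parallelizer call that collects the per-device results; the package name
  # is a placeholder.
  #
  #   devices = DeviceUtils.HealthyDevices()
  #   parallel_devices = DeviceUtils.parallel(devices)
  #   parallel_devices.RunShellCommand(
  #       ['am', 'force-stop', 'com.example.app'], check_return=True).pGet(None)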
@classmethod
def HealthyDevices(cls, blacklist=None, device_arg='default', **kwargs):
"""Returns a list of DeviceUtils instances.
Returns a list of DeviceUtils instances that are attached, not blacklisted,
and optionally filtered by --device flags or ANDROID_SERIAL environment
variable.
Args:
blacklist: A DeviceBlacklist instance (optional). Device serials in this
blacklist will never be returned, but a warning will be logged if they
otherwise would have been.
device_arg: The value of the --device flag. This can be:
'default' -> Same as [], but returns an empty list rather than raise a
NoDevicesError.
[] -> Returns all devices, unless $ANDROID_SERIAL is set.
None -> Use $ANDROID_SERIAL if set, otherwise looks for a single
attached device. Raises an exception if multiple devices are
attached.
'serial' -> Returns an instance for the given serial, if not
blacklisted.
['A', 'B', ...] -> Returns instances for the subset that is not
blacklisted.
A device serial, or a list of device serials (optional).
Returns:
A list of DeviceUtils instances.
Raises:
NoDevicesError: Raised when no non-blacklisted devices exist and
device_arg is passed.
MultipleDevicesError: Raise when multiple devices exist, but |device_arg|
is None.
"""
allow_no_devices = False
if device_arg == 'default':
allow_no_devices = True
device_arg = ()
select_multiple = True
if not (isinstance(device_arg, tuple) or isinstance(device_arg, list)):
select_multiple = False
if device_arg:
device_arg = (device_arg,)
blacklisted_devices = blacklist.Read() if blacklist else []
# adb looks for ANDROID_SERIAL, so support it as well.
android_serial = os.environ.get('ANDROID_SERIAL')
if not device_arg and android_serial:
device_arg = (android_serial,)
def blacklisted(serial):
if serial in blacklisted_devices:
logger.warning('Device %s is blacklisted.', serial)
return True
return False
if device_arg:
devices = [cls(x, **kwargs) for x in device_arg if not blacklisted(x)]
else:
devices = []
for adb in adb_wrapper.AdbWrapper.Devices():
if not blacklisted(adb.GetDeviceSerial()):
devices.append(cls(_CreateAdbWrapper(adb), **kwargs))
if len(devices) == 0 and not allow_no_devices:
raise device_errors.NoDevicesError()
if len(devices) > 1 and not select_multiple:
raise device_errors.MultipleDevicesError(devices)
return sorted(devices)
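  # Illustrative sketch (not part of the original module): the main
  # device_arg call patterns described in the docstring above. The serials
  # are placeholders.
  #
  #   DeviceUtils.HealthyDevices()                      # all attached devices
  #   DeviceUtils.HealthyDevices(device_arg=None)       # exactly one device
  #   DeviceUtils.HealthyDevices(device_arg='0123456789abcdef')
  #   DeviceUtils.HealthyDevices(device_arg=['A', 'B'])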
@decorators.WithTimeoutAndRetriesFromInstance()
def RestartAdbd(self, timeout=None, retries=None):
logger.info('Restarting adbd on device.')
with device_temp_file.DeviceTempFile(self.adb, suffix='.sh') as script:
self.WriteFile(script.name, _RESTART_ADBD_SCRIPT)
self.RunShellCommand(
['source', script.name], check_return=True, as_root=True)
self.adb.WaitForDevice()
@decorators.WithTimeoutAndRetriesFromInstance()
def GrantPermissions(self, package, permissions, timeout=None, retries=None):
# Permissions only need to be set on M and above because of the changes to
# the permission model.
if not permissions or self.build_version_sdk < version_codes.MARSHMALLOW:
return
permissions = set(
p for p in permissions if not _PERMISSIONS_BLACKLIST_RE.match(p))
if ('android.permission.WRITE_EXTERNAL_STORAGE' in permissions
and 'android.permission.READ_EXTERNAL_STORAGE' not in permissions):
permissions.add('android.permission.READ_EXTERNAL_STORAGE')
script = ';'.join([
'p={package}',
'for q in {permissions}',
'do pm grant "$p" "$q"',
'echo "{sep}$q{sep}$?{sep}"',
'done'
]).format(
package=cmd_helper.SingleQuote(package),
permissions=' '.join(
cmd_helper.SingleQuote(p) for p in sorted(permissions)),
sep=_SHELL_OUTPUT_SEPARATOR)
logger.info('Setting permissions for %s.', package)
res = self.RunShellCommand(
script, shell=True, raw_output=True, large_output=True,
check_return=True)
res = res.split(_SHELL_OUTPUT_SEPARATOR)
failures = [
(permission, output.strip())
for permission, status, output in zip(res[1::3], res[2::3], res[0::3])
if int(status)]
if failures:
logger.warning(
'Failed to grant some permissions. Blacklist may need to be updated?')
for permission, output in failures:
# Try to grab the relevant error message from the output.
m = _PERMISSIONS_EXCEPTION_RE.search(output)
if m:
error_msg = m.group(0)
elif len(output) > 200:
error_msg = repr(output[:200]) + ' (truncated)'
else:
error_msg = repr(output)
logger.warning('- %s: %s', permission, error_msg)
@decorators.WithTimeoutAndRetriesFromInstance()
def IsScreenOn(self, timeout=None, retries=None):
"""Determines if screen is on.
    Dumpsys input_method exposes screen on/off state. Below is an explanation of
the states.
Pre-L:
On: mScreenOn=true
Off: mScreenOn=false
L+:
On: mInteractive=true
Off: mInteractive=false
Returns:
True if screen is on, false if it is off.
Raises:
device_errors.CommandFailedError: If screen state cannot be found.
"""
if self.build_version_sdk < version_codes.LOLLIPOP:
input_check = 'mScreenOn'
check_value = 'mScreenOn=true'
else:
input_check = 'mInteractive'
check_value = 'mInteractive=true'
dumpsys_out = self._RunPipedShellCommand(
'dumpsys input_method | grep %s' % input_check)
if not dumpsys_out:
raise device_errors.CommandFailedError(
'Unable to detect screen state', str(self))
return check_value in dumpsys_out[0]
@decorators.WithTimeoutAndRetriesFromInstance()
def SetScreen(self, on, timeout=None, retries=None):
"""Turns screen on and off.
Args:
      on: bool to decide state to switch to. True = on, False = off.
"""
def screen_test():
return self.IsScreenOn() == on
if screen_test():
logger.info('Screen already in expected state.')
return
self.SendKeyEvent(keyevent.KEYCODE_POWER)
timeout_retry.WaitFor(screen_test, wait_period=1)
| catapult-project/catapult-csm | devil/devil/android/device_utils.py | Python | bsd-3-clause | 103,314 |
# -*- test-case-name: go.apps.jsbox.tests.test_log -*-
# -*- coding: utf-8 -*-
import logging
import datetime
from twisted.internet.defer import inlineCallbacks, returnValue
from vxsandbox import LoggingResource
from vumi import log
from vumi.persist.redis_base import Manager
from vumi.persist.txredis_manager import TxRedisManager
class LogManager(object):
"""
    Stores and retrieves logs for a jsbox application.
"""
# this uses Manager.calls_manager so that it can be used from
# Django.
DEFAULT_MAX_LOGS_PER_CONVERSATION = 1000
DEFAULT_SUB_STORE = "jsbox_logs_store"
def __init__(self, redis, max_logs_per_conversation=None,
sub_store=DEFAULT_SUB_STORE):
if sub_store is not None:
redis = redis.sub_manager(sub_store)
self.redis = self.manager = redis
if max_logs_per_conversation is None:
max_logs_per_conversation = self.DEFAULT_MAX_LOGS_PER_CONVERSATION
self.max_logs_per_conversation = max_logs_per_conversation
def _conv_key(self, campaign_key, conversation_key):
return ":".join([campaign_key, conversation_key])
@Manager.calls_manager
def add_log(self, campaign_key, conversation_key, msg, level):
ts = datetime.datetime.utcnow().isoformat()
full_msg = "[%s, %s] %s" % (ts, logging.getLevelName(level), msg)
conv_key = self._conv_key(campaign_key, conversation_key)
yield self.redis.lpush(conv_key, full_msg)
yield self.redis.ltrim(conv_key, 0, self.max_logs_per_conversation - 1)
@Manager.calls_manager
def get_logs(self, campaign_key, conversation_key):
conv_key = self._conv_key(campaign_key, conversation_key)
msgs = yield self.redis.lrange(conv_key, 0, -1)
returnValue(msgs)
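# Illustrative sketch (not part of the original module): how LogManager is
# meant to be driven from inside an inlineCallbacks function. The campaign and
# conversation keys are made-up placeholders, and `redis` is assumed to be an
# already-configured (Tx)RedisManager instance.
#
#   manager = LogManager(redis)
#   yield manager.add_log('campaign-1', 'conv-1', 'hello world', logging.INFO)
#   logs = yield manager.get_logs('campaign-1', 'conv-1')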
class GoLoggingResource(LoggingResource):
"""
Resource that allows a sandbox to log messages.
Messages are logged both via Twisted's logging framework and
to a per-conversation log store in Redis.
"""
@inlineCallbacks
def setup(self):
super(GoLoggingResource, self).setup()
redis_config = self.config.get('redis_manager', {})
max_logs_per_conversation = self.config.get(
'max_logs_per_conversation')
self._redis = yield TxRedisManager.from_config(redis_config)
self.log_manager = LogManager(
self._redis, max_logs_per_conversation=max_logs_per_conversation)
@inlineCallbacks
def teardown(self):
yield self._redis.close_manager()
yield super(GoLoggingResource, self).teardown()
@inlineCallbacks
def log(self, api, msg, level):
conv = self.app_worker.conversation_for_api(api)
campaign_key = conv.user_account.key
conversation_key = conv.key
# The keys may be unicode, so make everything unicode and then encode.
internal_msg = u"[Account: %s, Conversation: %s] %r" % (
campaign_key, conversation_key, msg)
log.msg(internal_msg.encode("ascii"), logLevel=level)
yield self.log_manager.add_log(campaign_key, conversation_key,
msg, level)
| praekelt/vumi-go | go/apps/jsbox/log.py | Python | bsd-3-clause | 3,171 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
gateway tests - Users
Copyright 2009-2015 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
pytest fixtures used as defined in conftest.py:
- gatewaywrapper
"""
import omero
import pytest
from omero.gateway.scripts import dbhelpers
class TestUser (object):
def testUsers(self, gatewaywrapper):
gatewaywrapper.loginAsUser()
# Try reconnecting without disconnect
gatewaywrapper._has_connected = False
gatewaywrapper.doConnect()
gatewaywrapper.loginAsAuthor()
gatewaywrapper.loginAsAdmin()
def testSaveAs(self, gatewaywrapper):
for u in (gatewaywrapper.AUTHOR, gatewaywrapper.ADMIN):
# Test image should be owned by author
gatewaywrapper.loginAsAuthor()
image = gatewaywrapper.getTestImage(autocreate=True)
ownername = image.getOwnerOmeName()
# Now login as author or admin
gatewaywrapper.doLogin(u)
gatewaywrapper.gateway.SERVICE_OPTS.setOmeroGroup('-1')
image = gatewaywrapper.getTestImage()
assert ownername == gatewaywrapper.AUTHOR.name
# Create some object
param = omero.sys.Parameters()
param.map = {
'ns': omero.rtypes.rstring('weblitz.UserTest.testSaveAs')}
queryService = gatewaywrapper.gateway.getQueryService()
anns = queryService.findAllByQuery(
'from CommentAnnotation as a where a.ns=:ns', param)
assert len(anns) == 0
gatewaywrapper.gateway.SERVICE_OPTS.setOmeroGroup()
ann = omero.gateway.CommentAnnotationWrapper(
conn=gatewaywrapper.gateway)
ann.setNs(param.map['ns'].val)
ann.setValue('foo')
ann.saveAs(image.getDetails())
# Annotations are owned by author
gatewaywrapper.loginAsAuthor()
try:
queryService = gatewaywrapper.gateway.getQueryService()
anns = queryService.findAllByQuery(
'from CommentAnnotation as a where a.ns=:ns', param)
assert len(anns) == 1
assert omero.gateway.CommentAnnotationWrapper(
gatewaywrapper.gateway, anns[0]).getOwnerOmeName(), \
gatewaywrapper.AUTHOR.name
finally:
gatewaywrapper.gateway.getUpdateService().deleteObject(
ann._obj)
queryService = gatewaywrapper.gateway.getQueryService()
anns = queryService.findAllByQuery(
'from CommentAnnotation as a where a.ns=:ns', param)
assert len(anns) == 0
def testCrossGroupSave(self, gatewaywrapper):
gatewaywrapper.loginAsUser()
uid = gatewaywrapper.gateway.getUserId()
gatewaywrapper.loginAsAdmin()
gatewaywrapper.gateway.SERVICE_OPTS.setOmeroGroup('-1')
d = gatewaywrapper.getTestDataset()
did = d.getId()
g = d.getDetails().getGroup()
gid = g.getId()
chmod = omero.cmd.Chmod2(targetObjects={'ExperimenterGroup': [gid]})
admin = gatewaywrapper.gateway.getAdminService()
admin.addGroups(omero.model.ExperimenterI(uid, False), [g._obj])
gatewaywrapper.gateway.SERVICE_OPTS.setOmeroGroup('-1')
# make sure the group is groupwrite enabled
perms = str(d.getDetails().getGroup().getDetails().permissions)
chmod.permissions = 'rwrw--'
gatewaywrapper.gateway.c.submit(chmod)
d = gatewaywrapper.getTestDataset()
g = d.getDetails().getGroup()
assert g.getDetails().permissions.isGroupWrite()
gatewaywrapper.loginAsUser()
# User is now a member of the group to which testDataset belongs,
# which has groupWrite==True
        # But the default group for User is different
try:
gatewaywrapper.gateway.SERVICE_OPTS.setOmeroGroup('-1')
d = gatewaywrapper.getTestDataset()
did = d.getId()
n = d.getName()
d.setName(n+'_1')
d.save()
d = gatewaywrapper.gateway.getObject('dataset', did)
assert d.getName() == n+'_1'
d.setName(n)
d.save()
d = gatewaywrapper.gateway.getObject('dataset', did)
assert d.getName() == n
finally:
# Revert group permissions
gatewaywrapper.loginAsAdmin()
chmod.permissions = perms
gatewaywrapper.gateway.c.submit(chmod)
@pytest.mark.broken(ticket="11545")
def testCrossGroupRead(self, gatewaywrapper):
gatewaywrapper.loginAsAuthor()
p = gatewaywrapper.getTestProject()
assert str(p.getDetails().permissions)[4] == '-'
d = p.getDetails()
g = d.getGroup()
gatewaywrapper.loginAsUser()
gatewaywrapper.gateway.SERVICE_OPTS.setOmeroGroup('-1')
assert not g.getId() in \
gatewaywrapper.gateway.getEventContext().memberOfGroups
assert gatewaywrapper.gateway.getObject('project', p.getId()) is None
def testGroupOverObjPermissions(self, gatewaywrapper):
""" Object accesss must be dependent only of group permissions """
# Author
gatewaywrapper.loginAsAuthor()
# create group with rw----
# create project and annotation in that group
p = dbhelpers.ProjectEntry(
'testAnnotationPermissions', None,
create_group='testAnnotationPermissions', group_perms='rw----')
try:
p = p.create(gatewaywrapper.gateway)
except dbhelpers.BadGroupPermissionsException:
gatewaywrapper.loginAsAdmin()
admin = gatewaywrapper.gateway.getAdminService()
group = admin.lookupGroup('testAnnotationPermissions')
group_as_target = {'ExperimenterGroup': [group.id.val]}
chmod = omero.cmd.Chmod2(targetObjects=group_as_target,
permissions='rw----')
gatewaywrapper.gateway.c.submit(chmod)
gatewaywrapper.loginAsAuthor()
p = p.create(gatewaywrapper.gateway)
pid = p.getId()
g = p.getDetails().getGroup()._obj
try:
# Admin
# add User to group
gatewaywrapper.loginAsUser()
uid = gatewaywrapper.gateway.getUserId()
gatewaywrapper.loginAsAdmin()
admin = gatewaywrapper.gateway.getAdminService()
admin.addGroups(omero.model.ExperimenterI(uid, False), [g])
# User
# try to read project and annotation, which fails
gatewaywrapper.loginAsUser()
gatewaywrapper.gateway.SERVICE_OPTS.setOmeroGroup('-1')
assert gatewaywrapper.gateway.getObject('project', pid) is None
# Admin
# Chmod project to rwrw--
gatewaywrapper.loginAsAdmin()
group_as_target = {'ExperimenterGroup': [g.id.val]}
chmod = omero.cmd.Chmod2(targetObjects=group_as_target,
permissions='rwrw--')
gatewaywrapper.gateway.c.submit(chmod)
# Author
# check project has proper permissions
gatewaywrapper.loginAsAuthor()
gatewaywrapper.gateway.SERVICE_OPTS.setOmeroGroup('-1')
pa = gatewaywrapper.gateway.getObject('project', pid)
assert pa is not None
# User
# read project and annotation
gatewaywrapper.loginAsUser()
gatewaywrapper.gateway.SERVICE_OPTS.setOmeroGroup('-1')
assert gatewaywrapper.gateway.getObject(
'project', pid) is not None
finally:
gatewaywrapper.loginAsAuthor()
handle = gatewaywrapper.gateway.deleteObjects(
'Project', [p.getId()], deleteAnns=True, deleteChildren=True)
gatewaywrapper.waitOnCmd(gatewaywrapper.gateway.c, handle)
| dominikl/openmicroscopy | components/tools/OmeroPy/test/integration/gatewaytest/test_user.py | Python | gpl-2.0 | 8,119 |
#!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_field_listing():
lyr = mapnik.Layer('test')
lyr.datasource = mapnik.Shapefile(file='../data/shp/poly.shp')
fields = lyr.datasource.fields()
eq_(fields, ['AREA', 'EAS_ID', 'PRFEDEA'])
def test_total_feature_count():
lyr = mapnik.Layer('test')
lyr.datasource = mapnik.Shapefile(file='../data/shp/poly.shp')
features = lyr.datasource.all_features()
num_feats = len(features)
eq_(num_feats, 10)
def test_feature_envelope():
lyr = mapnik.Layer('test')
lyr.datasource = mapnik.Shapefile(file='../data/shp/poly.shp')
features = lyr.datasource.all_features()
for feat in features:
env = feat.envelope()
contains = lyr.envelope().contains(env)
eq_(contains, True)
        intersects = lyr.envelope().intersects(env)
eq_(intersects, True)
def test_feature_attributes():
lyr = mapnik.Layer('test')
lyr.datasource = mapnik.Shapefile(file='../data/shp/poly.shp')
features = lyr.datasource.all_features()
feat = features[0]
attrs = {'PRFEDEA': u'35043411', 'EAS_ID': 168, 'AREA': 215229.266}
eq_(feat.attributes, attrs)
eq_(lyr.datasource.fields(),['AREA', 'EAS_ID', 'PRFEDEA'])
eq_(lyr.datasource.field_types(),[float,int,str])
| h4ck3rm1k3/MapNickAutotools | tests/python_tests/datasource_test.py | Python | lgpl-2.1 | 1,512 |
# -*- coding: utf-8 -*-
###############################################################################
#
# ODOO (ex OpenERP)
# Open Source Management Solution
# Copyright (C) 2001-2015 Micronaet S.r.l. (<http://www.micronaet.it>)
# Developer: Nicola Riolini @thebrush (<https://it.linkedin.com/in/thebrush>)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import os
import sys
import logging
import openerp
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv, expression, orm
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from openerp import SUPERUSER_ID
from openerp import tools
from openerp.tools.translate import _
from openerp.tools import (DEFAULT_SERVER_DATE_FORMAT,
DEFAULT_SERVER_DATETIME_FORMAT,
DATETIME_FORMATS_MAP,
float_compare)
_logger = logging.getLogger(__name__)
class QualityExportExcelReport(orm.TransientModel):
    ''' Wizard to export data to Excel
'''
_name = 'quality.export.excel.report'
# --------------------
# Wizard button event:
# --------------------
def action_print(self, cr, uid, ids, context=None):
''' Event for print report
'''
if context is None:
context = {}
# Pool used:
excel_pool = self.pool.get('excel.writer')
wiz_proxy = self.browse(cr, uid, ids, context=context)[0]
report = wiz_proxy.report
if report == 'conformed':
state_name = wiz_proxy.state_conformed or ''
elif report == 'claim':
state_name = wiz_proxy.state or ''
else:
return True # not present
# Parameters:
parameter_db = {
'claim': {
# Excel:
'header': [
_('Rif.'), _('Data'),
_('Partner'), _('Destinazione'), _('Rif. cliente'),
_('Descrizione'), _('Dettaglio'), _('Analisi'),
_('Origini'), _('Cause'), _('Gravita\''), _('Stato'),
# TODO lot?
],
'header_width': [
15, 20,
40, 40, 20,
50, 50, 50,
30, 30, 30, 20,
],
# Translate:
'report': 'Reclami',
'state': {
'draft': 'Bozza',
'comunication': 'Comunicazione',
'opened': 'Aperto',
'nc': 'Nota di credito',
'done': 'Nota di credito fatta',
'closed': 'Chiuso',
'cancel': 'Annullato',
'saw': 'Visto',
},
# Fields:
'date': 'date',
'subject': 'subject',
#'origin': 'origin_id',
# TODO
},
'conformed': {
# Excel:
# TODO Change:
'header': [
_('Rif.'), _('Data'), _('Fornitore'),
_('Descrizione'),
_('Gravita\''),
_('Stato'),
_('Quantita'),
_('Temperatura'),
_('Etichetta'),
_('Confezione'),
_('Qualita'),
_('Scadenza'),
_('Igenico/Sanitario'),
_('Ritardo'),
_('Mancata consegna'),
_('Corpi estranei'),
],
'header_width': [
15, 20, 20,
20,
40,
40,
5, 5, 5, 5, 5,
5, 5, 5, 5, 5,
],
# Translate:
'report': u'Non Conformità',
            'state': {
                'draft': 'Bozza',
                'opened': 'Aperto',
                'closed': 'Chiuso',
                'cancel': 'Cancellato',
                'saw': 'Visto',
},
# Field:
'date': 'insert_date',
'subject': 'name',
#'origin': 'origin',
# TODO
},
}
# ---------------------------------------------------------------------
# Domain creation:
# ---------------------------------------------------------------------
domain = []
filter_description = 'Report: %s' % parameter_db[report]['report']
# Date:
field_name = parameter_db[report]['date']
if wiz_proxy.from_date:
domain.append((field_name, '>=', '%s 00:00:00' % \
wiz_proxy.from_date[:10]))
filter_description += _(', Dalla data: %s 00:00:00') % \
wiz_proxy.from_date[:10]
if wiz_proxy.to_date:
domain.append((field_name, '<=', '%s 23:59:59' % \
wiz_proxy.to_date[:10]))
filter_description += _(', Alla data: %s 23:59:59') % \
wiz_proxy.to_date[:10]
# Text:
field_name = parameter_db[report]['subject']
if wiz_proxy.subject:
domain.append((field_name, 'ilike', wiz_proxy.subject))
filter_description += _(', Oggetto: "%s"') % wiz_proxy.subject
# One2many:
if wiz_proxy.partner_id:
domain.append(('partner_id', '=', wiz_proxy.partner_id.id))
filter_description += _(', Partner: %s') % \
wiz_proxy.partner_id.name
if wiz_proxy.supplier_lot:
            domain.append(('supplier_lot', '=', wiz_proxy.supplier_lot.id))
filter_description += _(', Fornitore Lotto: %s') % \
wiz_proxy.supplier_lot.name
if wiz_proxy.reference_user_id:
domain.append(
('reference_user_id', '=', wiz_proxy.reference_user_id.id))
filter_description += _(', Riferimento: %s') % \
wiz_proxy.reference_user_id.name
if wiz_proxy.origin_id:
domain.append(('origin_id', '=', wiz_proxy.origin_id.id))
filter_description += _(', Origine: %s') % wiz_proxy.origin_id.name
if wiz_proxy.cause_id:
domain.append(('cause_id', '=', wiz_proxy.cause_id.id))
filter_description += _(', Cause: %s') % wiz_proxy.cause_id.name
if wiz_proxy.gravity_id:
domain.append(('gravity_id', '=', wiz_proxy.gravity_id.id))
filter_description += _(', Gravita\': %s') % \
wiz_proxy.gravity_id.name
if wiz_proxy.conformed_type:
domain.append((wiz_proxy.conformed_type, '=', True))
            filter_description += _(', Tipo: %s') % \
wiz_proxy.conformed_type
if state_name:
domain.append(('state', '=', state_name))
filter_description += _(', Stato: %s'
) % parameter_db[report]['state'].get(state_name, '')
# ---------------------------------------------------------------------
# REPORT CASES:
# ---------------------------------------------------------------------
# Parameters:
ws_name = _(parameter_db[report]['report'])
name_of_file = _('%s.xls' % report)
# -----------------------------------------------------------------
# Create Excel file:
# -----------------------------------------------------------------
# Worksheet:
ws = excel_pool.create_worksheet(ws_name)
# Format:
excel_pool.set_format()
format_title = excel_pool.get_format('title')
format_header = excel_pool.get_format('header')
format_text = excel_pool.get_format('text')
excel_pool.column_width(ws_name, parameter_db[report]['header_width'])
# Title:
row = 0
excel_pool.write_xls_line(ws_name, row, [
_('Filtro:'),
filter_description,
], format_title)
# Header:
row = 1
excel_pool.write_xls_line(ws_name, row, parameter_db[report]['header'],
format_header)
# ---------------------------------------------------------------------
# Load data:
# ---------------------------------------------------------------------
if report == 'claim':
claim_pool = self.pool.get('quality.claim')
claim_ids = claim_pool.search(cr, uid, domain, context=context)
for claim in sorted(
claim_pool.browse(
cr, uid, claim_ids, context=context),
key=lambda x: (x.date, x.ref)):
row += 1
data = [
claim.ref or '',
claim.date,
claim.partner_id.name,
claim.partner_address_id.name or '',
claim.customer_ref or '',
claim.name or '',
claim.subject or '',
claim.analysis or '',
claim.origin_id.name or '',
claim.cause_id.name or '',
claim.gravity_id.name or '',
parameter_db[report]['state'].get(state_name, ''),
]
excel_pool.write_xls_line(ws_name, row, data, format_text)
elif report == 'conformed':
conformed_pool = self.pool.get('quality.conformed')
conformed_ids = conformed_pool.search(cr, uid, domain, context=context)
for conformed in sorted(
conformed_pool.browse(
cr, uid, conformed_ids, context=context),
key=lambda x: (x.insert_date, x.ref)):
row += 1
data = [
conformed.ref or '',
conformed.insert_date,
conformed.supplier_lot.name,
conformed.name or '',
conformed.gravity_id.name or '',
parameter_db[report]['state'].get(state_name, ''),
'X' if conformed.quantity else '',
'X' if conformed.temperature else '',
'X' if conformed.label else '',
'X' if conformed.aesthetic_packaging else '',
'X' if conformed.quality else '',
'X' if conformed.deadline else '',
'X' if conformed.sanitation else '',
'X' if conformed.delay else '',
'X' if conformed.no_delivery else '',
'X' if conformed.external_material else '',
]
excel_pool.write_xls_line(ws_name, row, data, format_text)
return excel_pool.return_attachment(cr, uid, ws_name,
name_of_file=name_of_file, version='7.0', php=True,
context=context)
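    # Illustrative sketch (not part of the original module): how this wizard
    # would typically be driven from server-side code. 'wizard_pool' is
    # assumed to be self.pool.get('quality.export.excel.report'); the values
    # passed to create() are placeholders.
    #
    #   wiz_id = wizard_pool.create(cr, uid, {
    #       'report': 'claim', 'from_date': '2015-01-01'}, context=context)
    #   attachment = wizard_pool.action_print(
    #       cr, uid, [wiz_id], context=context)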
_columns = {
'report': fields.selection([
('claim', 'Claim'),
('conformed', 'Not conformed'),
], 'Report', required=True),
'from_date': fields.date('From date >= '),
'to_date': fields.date('To date <='),
'subject': fields.char('Subject', size=100),
'supplier_lot': fields.many2one('res.partner', 'Supplier'),
'partner_id': fields.many2one('res.partner', 'Customer'),
'origin_id': fields.many2one('quality.origin', 'Origin'),
'cause_id': fields.many2one('quality.claim.cause', 'Cause'),
'gravity_id': fields.many2one('quality.gravity', 'Gravity'),
'reference_user_id': fields.many2one('res.users', 'Reference user',
help="Reference for claim to your customer"),
'conformed_type': fields.selection([
('quantity', u'Quantità'),
('temperature', 'Temperatura'),
('label', 'Etichetta'),
('aesthetic_packaging', 'Confezione'),
('quality', u'Qualità'),
('deadline', 'Scadenza'),
('sanitation', 'Igenico/Sanitario'),
('delay', 'Ritardo'),
('no_delivery', 'Mancata Consegna'),
('external_material', 'Corpi estranei'),
], 'Tipo'),
# Claim state:
'state': fields.selection([
('draft', 'Draft'),
('comunication', 'Comunication'),
('opened', 'Opened'),
('nc', 'Credit Note'),
('done', 'Credit Note Done'),
('closed', 'Closed'), # TODO Vista RAQ
('cancel', 'Cancel'),
('saw', 'Saw'),
], 'State'),
# Conformed state:
'state_conformed': fields.selection([
('draft', 'Draft'),
('opened', 'Opened'),
('closed', 'Closed'),
('cancel', 'Cancel'),
('saw', 'Saw'),
], 'State'),
}
_defaults = {
'report': lambda *x: 'claim',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| Micronaet/micronaet-quality | quality_export_report/wizard/export_document.py | Python | agpl-3.0 | 14,444 |
from __future__ import absolute_import, division, print_function, unicode_literals
import unittest
from amaascore.tools.generate_party import generate_address, generate_email
class PartyChildrenTest(unittest.TestCase):
def setUp(self):
self.longMessage = True # Print complete error message on failure
def tearDown(self):
pass
def test_InvalidEmail(self):
with self.assertRaisesRegexp(ValueError, 'Invalid email'):
email = generate_email('invalid.email.amaas.com')
def test_InvalidAddress(self):
with self.assertRaisesRegexp(ValueError, 'Country ID should be a ISO 3166-1 Alpha-3 code'):
address = generate_address(country_id='TEST')
if __name__ == '__main__':
unittest.main()
| amaas-fintech/amaas-core-sdk-python | tests/unit/parties/children.py | Python | apache-2.0 | 763 |
# Copyright (c) Amber Brown, 2015
# See LICENSE for details.
from __future__ import absolute_import, division
from textwrap import dedent
import pkg_resources
from twisted.trial.unittest import TestCase
from collections import OrderedDict
from .._builder import render_fragments, split_fragments
class FormatterTests(TestCase):
def test_split(self):
fragments = {
"": {
("1", "misc", 0): u"",
("baz", "misc", 0): u"",
("2", "feature", 0): u"Foo added.",
("5", "feature", 0): u"Foo added. \n",
("6", "bugfix", 0): u"Foo added.",
},
"Web": {
("3", "bugfix", 0): u"Web fixed. ",
("4", "feature", 0): u"Foo added.",
},
}
expected_output = {
"": {
"misc": {"": ["1", "baz"]},
"feature": {u"Foo added.": ["2", "5"]},
"bugfix": {u"Foo added.": ["6"]},
},
"Web": {
"bugfix": {u"Web fixed.": ["3"]},
"feature": {u"Foo added.": ["4"]},
},
}
definitions = OrderedDict(
[
("feature", {"name": "Features", "showcontent": True}),
("bugfix", {"name": "Bugfixes", "showcontent": True}),
("misc", {"name": "Misc", "showcontent": False}),
]
)
output = split_fragments(fragments, definitions)
self.assertEqual(expected_output, output)
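    # Illustrative note (not part of the original tests): every fragment key is
    # a (ticket, category, counter) tuple, which is why two tickets with the
    # same text collapse into a single entry after split_fragments(), e.g.
    #
    #   {"feature": {u"Foo added.": ["2", "5"]}}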
def test_basic(self):
"""
Basic functionality -- getting a bunch of news fragments and formatting
them into a rST file -- works.
"""
fragments = OrderedDict(
[
(
"",
{
# asciibetical sorting will do 1, 142, 9
# we want 1, 9, 142 instead
("142", "misc", 0): u"",
("1", "misc", 0): u"",
("9", "misc", 0): u"",
("bar", "misc", 0): u"",
("4", "feature", 0): u"Stuff!",
("2", "feature", 0): u"Foo added.",
("72", "feature", 0): u"Foo added.",
("9", "feature", 0): u"Foo added.",
("baz", "feature", 0): u"Fun!",
},
),
("Names", {}),
("Web", {("3", "bugfix", 0): u"Web fixed."}),
]
)
definitions = OrderedDict(
[
("feature", {"name": "Features", "showcontent": True}),
("bugfix", {"name": "Bugfixes", "showcontent": True}),
("misc", {"name": "Misc", "showcontent": False}),
]
)
expected_output = u"""MyProject 1.0 (never)
=====================
Features
--------
- Fun! (baz)
- Foo added. (#2, #9, #72)
- Stuff! (#4)
Misc
----
- bar, #1, #9, #142
Names
-----
No significant changes.
Web
---
Bugfixes
~~~~~~~~
- Web fixed. (#3)
"""
template = pkg_resources.resource_string(
"towncrier", "templates/default.rst"
).decode("utf8")
fragments = split_fragments(fragments, definitions)
output = render_fragments(
template,
None,
"",
fragments,
definitions,
["-", "~"],
wrap=True,
versiondata={"name": "MyProject", "version": "1.0", "date": "never"},
)
self.assertEqual(output, expected_output)
# Check again with non-default underlines
expected_output_weird_underlines = u"""MyProject 1.0 (never)
=====================
Features
********
- Fun! (baz)
- Foo added. (#2, #9, #72)
- Stuff! (#4)
Misc
****
- bar, #1, #9, #142
Names
*****
No significant changes.
Web
***
Bugfixes
^^^^^^^^
- Web fixed. (#3)
"""
output = render_fragments(
template,
None,
"",
fragments,
definitions,
["*", "^"],
wrap=True,
versiondata={"name": "MyProject", "version": "1.0", "date": "never"},
)
self.assertEqual(output, expected_output_weird_underlines)
def test_issue_format(self):
"""
issue_format option can be used to format issue text.
And sorting happens before formatting, so numerical issues are still
ordered numerically even if that doesn't match asciibetical order on
the final text.
"""
fragments = {
"": {
# asciibetical sorting will do 1, 142, 9
# we want 1, 9, 142 instead
("142", "misc", 0): u"",
("1", "misc", 0): u"",
("9", "misc", 0): u"",
("bar", "misc", 0): u"",
}
}
definitions = OrderedDict([("misc", {"name": "Misc", "showcontent": False})])
expected_output = u"""MyProject 1.0 (never)
=====================
Misc
----
- xxbar, xx1, xx9, xx142
"""
template = pkg_resources.resource_string(
"towncrier", "templates/default.rst"
).decode("utf8")
fragments = split_fragments(fragments, definitions)
output = render_fragments(
template,
u"xx{issue}",
"",
fragments,
definitions,
["-", "~"],
wrap=True,
versiondata={"name": "MyProject", "version": "1.0", "date": "never"},
)
self.assertEqual(output, expected_output)
def test_line_wrapping(self):
"""
Output is nicely wrapped, but doesn't break up words (which can mess
up URLs)
"""
self.maxDiff = None
fragments = {
"": {
(
"1",
"feature",
0,
): u"""
asdf asdf asdf asdf looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong newsfragment.
""", # NOQA
("2", "feature", 0): u"https://google.com/q=?" + u"-" * 100,
("3", "feature", 0): u"a " * 80,
}
}
definitions = OrderedDict(
[("feature", {"name": "Features", "showcontent": True})]
)
expected_output = u"""MyProject 1.0 (never)
=====================
Features
--------
- asdf asdf asdf asdf
looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong
newsfragment. (#1)
-
https://google.com/q=?----------------------------------------------------------------------------------------------------
(#2)
- a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a
a a (#3)
"""
template = pkg_resources.resource_string(
"towncrier", "templates/default.rst"
).decode("utf8")
fragments = split_fragments(fragments, definitions)
output = render_fragments(
template,
None,
"",
fragments,
definitions,
["-", "~"],
wrap=True,
versiondata={"name": "MyProject", "version": "1.0", "date": "never"},
)
self.assertEqual(output, expected_output)
def test_line_wrapping_disabled(self):
"""
Output is not wrapped if it's disabled.
"""
self.maxDiff = None
fragments = {
"": {
(
"1",
"feature",
0,
): u"""
asdf asdf asdf asdf looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong newsfragment.
""", # NOQA
("2", "feature", 0): u"https://google.com/q=?" + u"-" * 100,
("3", "feature", 0): u"a " * 80,
}
}
definitions = OrderedDict(
[("feature", {"name": "Features", "showcontent": True})]
)
expected_output = u"""MyProject 1.0 (never)
=====================
Features
--------
- asdf asdf asdf asdf looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong newsfragment. (#1)
- https://google.com/q=?---------------------------------------------------------------------------------------------------- (#2)
- a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a a (#3)
""" # NOQA
template = pkg_resources.resource_string(
"towncrier", "templates/default.rst"
).decode("utf8")
fragments = split_fragments(fragments, definitions)
output = render_fragments(
template,
None,
"",
fragments,
definitions,
["-", "~"],
wrap=False,
versiondata={"name": "MyProject", "version": "1.0", "date": "never"},
)
self.assertEqual(output, expected_output)
def test_title_format_specified(self):
"""
Title format replaces default top line rendering.
"""
template = pkg_resources.resource_string(
"towncrier", "templates/default.rst"
).decode("utf8")
fragments = split_fragments(fragments={}, definitions=OrderedDict())
output = render_fragments(
template=template,
issue_format=None,
top_line="A custom top line",
fragments=fragments,
definitions=OrderedDict(),
underlines=["-", "~"],
wrap=False,
versiondata={"name": "MyProject", "version": "1.0", "date": "never"},
)
expected_output = dedent(u"""\
A custom top line
=================
""")
self.assertEqual(expected_output, output)
| hawkowl/towncrier | src/towncrier/test/test_format.py | Python | mit | 10,155 |
##########################################################################
#
# Copyright (c) 2019, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import math
import unittest
import IECore
import IECoreScene
import imath
class MeshAlgoNormalsTest( unittest.TestCase ) :
def testPlane( self ) :
p = IECoreScene.MeshPrimitive.createPlane( imath.Box2f( imath.V2f( -1 ), imath.V2f( 1 ) ) )
del p["N"]
normals = IECoreScene.MeshAlgo.calculateNormals( p )
self.assertEqual( normals.interpolation, IECoreScene.PrimitiveVariable.Interpolation.Vertex )
self.assertTrue( normals.data.isInstanceOf( IECore.V3fVectorData.staticTypeId() ) )
self.assertEqual( normals.data.size(), p.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Vertex ) )
self.assertEqual( normals.data.getInterpretation(), IECore.GeometricData.Interpretation.Normal )
for n in normals.data :
self.assertEqual( n, imath.V3f( 0, 0, 1 ) )
def testSphere( self ) :
s = IECore.Reader.create( "test/IECore/data/cobFiles/pSphereShape1.cob" ).read()
del s["N"]
normals = IECoreScene.MeshAlgo.calculateNormals( s )
self.assertEqual( normals.interpolation, IECoreScene.PrimitiveVariable.Interpolation.Vertex )
self.assert_( normals.data.isInstanceOf( IECore.V3fVectorData.staticTypeId() ) )
self.assertEqual( normals.data.size(), s.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Vertex ) )
self.assertEqual( normals.data.getInterpretation(), IECore.GeometricData.Interpretation.Normal )
points = s["P"].data
for i in range( 0, normals.data.size() ) :
self.assertTrue( math.fabs( normals.data[i].length() - 1 ) < 0.001 )
p = points[i].normalize()
self.assertTrue( normals.data[i].dot( p ) > 0.99 )
self.assertTrue( normals.data[i].dot( p ) < 1.01 )
def testUniformInterpolation( self ) :
m = IECoreScene.MeshPrimitive.createPlane( imath.Box2f( imath.V2f( -1 ), imath.V2f( 1 ) ), imath.V2i( 10 ) )
del m["N"]
normals = IECoreScene.MeshAlgo.calculateNormals( m, interpolation = IECoreScene.PrimitiveVariable.Interpolation.Uniform )
self.assertEqual( normals.interpolation, IECoreScene.PrimitiveVariable.Interpolation.Uniform )
self.assertEqual( len( normals.data ), m.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Uniform ) )
for n in normals.data :
self.assertEqual( n, imath.V3f( 0, 0, 1 ) )
if __name__ == "__main__":
unittest.main()
| appleseedhq/cortex | test/IECoreScene/MeshAlgoNormalsTest.py | Python | bsd-3-clause | 4,040 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = "Rastislav Szabo <raszabo@cisco.com>, Lukas Macko <lmacko@cisco.com>"
__copyright__ = "Copyright 2016, Cisco Systems, Inc."
__license__ = "Apache 2.0"
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# sysrepod must be in PATH
from ConcurrentHelpers import *
import subprocess
import TestModule
import libsysrepoPython3
class SysrepoctlTester(SysrepoTester):
sysrepoctl = "{}/src/sysrepoctl".format(os.path.realpath(os.curdir))
def installModuleStep(self, yang_file, log_level = sr.SR_LL_INF):
self.process = subprocess.Popen([self.sysrepoctl, "-i", "--yang={0}".format(yang_file), "-L {0}".format(log_level)])
rc = self.process.wait()
self.tc.assertEqual(rc, 0)
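    # Illustrative note (added, not original code): the step above shells out to
    # roughly `sysrepoctl -i --yang=<file> -L <level>` and asserts a zero exit
    # code; the uninstall steps below build the analogous command lines.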
def uninstallModuleFailStep(self, module_name, log_level = sr.SR_LL_INF):
self.process = subprocess.Popen([self.sysrepoctl, "--uninstall", "--module={0}".format(module_name), "-L {0}".format(log_level)])
rc = self.process.wait()
self.tc.assertNotEquals(rc, 0)
def uninstallModuleStep(self, module_name, log_level = sr.SR_LL_INF):
self.process = subprocess.Popen([self.sysrepoctl, "--uninstall", "--module={0}".format(module_name), "-L {0}".format(log_level)])
rc = self.process.wait()
self.tc.assertEqual(rc, 0)
class SchemasManagementTest(unittest.TestCase):
@classmethod
def setUpClass(self):
TestModule.create_test_module()
def test_ModuleLoading(self):
"""Schemas are loaded on demand. Try to send multiple requests targeting the same model
        simultaneously. All of them should receive correct data.
"""
tm = TestManager()
srd = SysrepodDaemonTester("Srd")
tester1 = SysrepoTester("First", sr.SR_DS_STARTUP, sr.SR_CONN_DAEMON_REQUIRED, False)
tester2 = SysrepoTester("Second", sr.SR_DS_STARTUP, sr.SR_CONN_DAEMON_REQUIRED, False)
tester3 = SysrepoTester("Third", sr.SR_DS_STARTUP, sr.SR_CONN_DAEMON_REQUIRED, False)
tester4 = SysrepoTester("Fourth", sr.SR_DS_STARTUP, sr.SR_CONN_DAEMON_REQUIRED, False)
srd.add_step(srd.startDaemonStep)
tester1.add_step(tester1.waitStep)
tester2.add_step(tester2.waitStep)
tester3.add_step(tester3.waitStep)
tester4.add_step(tester4.waitStep)
srd.add_step(srd.waitStep)
tester1.add_step(tester1.restartConnection)
tester2.add_step(tester2.restartConnection)
tester3.add_step(tester3.restartConnection)
tester4.add_step(tester4.restartConnection)
srd.add_step(srd.waitStep)
tester1.add_step(tester1.getItemsStepExpectedCount, "/test-module:main/*", 19)
tester2.add_step(tester2.getItemsStepExpectedCount, "/test-module:main/*", 19)
tester3.add_step(tester3.getItemsStepExpectedCount, "/test-module:main/*", 19)
tester4.add_step(tester4.getItemsStepExpectedCount, "/test-module:main/*", 19)
srd.add_step(srd.stopDaemonStep)
tm.add_tester(srd)
tm.add_tester(tester1)
tm.add_tester(tester2)
tm.add_tester(tester3)
tm.add_tester(tester4)
tm.run()
def test_module_uninstall(self):
"""A schema can not be uninstalled until it is used by a session.
Test simulates the request of sysrepoctl trying to uninstall/install module.
"""
tmp_file = "/tmp/test-module.yang" # used to reinstall 'test-module' after uninstall
dep_file = "/tmp/referenced-data.yang" # 'test-module' depends on 'referenced-data'
tm = TestManager()
srd = SysrepodDaemonTester("Srd")
tester1 = SysrepoTester("First", sr.SR_DS_STARTUP, sr.SR_CONN_DAEMON_REQUIRED, False)
tester2 = SysrepoTester("Second", sr.SR_DS_STARTUP, sr.SR_CONN_DAEMON_REQUIRED, False)
tester3 = SysrepoTester("Third", sr.SR_DS_STARTUP, sr.SR_CONN_DAEMON_REQUIRED, False)
admin = SysrepoctlTester()
srd.add_step(srd.startDaemonStep)
tester1.add_step(tester1.waitStep)
tester2.add_step(tester2.waitStep)
tester3.add_step(tester3.waitStep)
admin.add_step(admin.waitStep)
srd.add_step(srd.waitStep)
tester1.add_step(tester1.restartConnection)
tester2.add_step(tester2.restartConnection)
tester3.add_step(tester3.restartConnection)
admin.add_step(admin.waitStep)
srd.add_step(srd.waitStep)
tester1.add_step(tester1.getItemsStepExpectedCount, "/test-module:main/*", 19)
tester2.add_step(tester2.setItemStep, "/test-module:main/string", sr.Val("abcd", sr.SR_STRING_T))
tester3.add_step(tester3.lockModelStep, "test-module")
admin.add_step(admin.waitStep)
        #unsuccessful attempt to uninstall
srd.add_step(srd.waitStep)
tester1.add_step(tester1.waitStep)
tester2.add_step(tester2.waitStep)
tester3.add_step(tester3.waitStep)
admin.add_step(admin.uninstallModuleFailStep, "test-module")
#export schema to file before uninstall and release lock
srd.add_step(srd.waitStep)
admin.add_step(admin.waitStep)
tester1.add_step(tester1.getSchemaToFileStep, "test-module", tmp_file)
tester2.add_step(tester2.getSchemaToFileStep, "referenced-data", dep_file)
tester3.add_step(tester3.unlockModelStep, "test-module")
#testers 1,2 close the session, tester 3 releases the lock -> module can be uninstalled
srd.add_step(srd.waitStep)
admin.add_step(admin.waitStep)
tester1.add_step(tester1.stopSession)
tester2.add_step(tester2.stopSession)
tester3.add_step(tester3.waitStep)
        #uninstall succeeds
srd.add_step(srd.waitStep)
admin.add_step(admin.uninstallModuleStep, "test-module")
tester3.add_step(tester3.waitStep)
#module is uninstalled
srd.add_step(srd.waitStep)
admin.add_step(admin.waitStep)
tester3.add_step(tester3.setItemFailStep, "/test-module:main/string", sr.Val("abcd", sr.SR_STRING_T))
#install module back
srd.add_step(srd.waitStep)
admin.add_step(admin.installModuleStep, tmp_file)
tester3.add_step(tester3.waitStep)
        #requests work again
srd.add_step(srd.waitStep)
tester3.add_step(tester3.setItemStep, "/test-module:main/string", sr.Val("abcd", sr.SR_STRING_T))
srd.add_step(srd.stopDaemonStep)
tm.add_tester(srd)
tm.add_tester(tester1)
tm.add_tester(tester2)
tm.add_tester(tester3)
tm.add_tester(admin)
tm.run()
if __name__ == '__main__':
unittest.main()
| morganzhh/sysrepo | swig/python3/tests/SchemasManagementTest.py | Python | apache-2.0 | 7,156 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# https://wwoods.github.io/2016/06/09/easy-sphinx-documentation-without-the-boilerplate/
# was immensely helpful *and should be the fucking out-of-the-box default experience* when
# turning on autodoc and autosummary
# http://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html
# for API docstring formatting
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
sys.path.insert( 0, os.path.abspath( '..' ) )
import bjarkan
language = None
project = bjarkan.__title__
copyright = bjarkan.__copyright__
author = bjarkan.__author__
version = bjarkan.__version__
release = version
templates_path = [ '_templates' ]
exclude_patterns = [ '_templates' ]
source_suffix = '.rst'
master_doc = project
pygments_style = 'sphinx'
todo_include_todos = False
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.napoleon'
]
autoclass_content = 'class'
autodoc_member_order = 'bysource'
autodoc_default_flags = [
'members', # include non-private members
'undoc-members', # include members without a docstring
#'private-members', # include _private and __private
#'special-members', # include __special__ members
#'inherited-members', # include members inherited from the base class
'show-inheritance', # include the inheritance hierarchy of a class
]
autodoc_mock_imports = [
'dbus',
'dbus.mainloop.glib',
'dbus.mainloop.glib.DBusGMainLoop',
'dbus.service',
'gi.repository',
'gi.repository.GObject',
'gwn.helpers',
'gwn.helpers.dbus',
'gwn.helpers.dbus.dbuscast',
'gwn.helpers.logger',
'gwn.helpers.logger.logger',
'systemd.journal.JournalHandler'
]
autosummary_generate = True
napoleon_numpy_docstring = False # Force consistency, leave only Google
napoleon_use_rtype = False # More legible
html_theme = 'sphinx_rtd_theme'
| willdeberry/bjarkan | sphinx/conf.py | Python | bsd-3-clause | 2,104 |
import os
import sys
from datetime import datetime
from unittest import TestCase
import tests
import settings
from files import files
import pyunrar
from analyzer.show import ShowAnalyzer
from analyzer.movie import MovieAnalyzer
from models import directory
class ExtractCleanName(TestCase):
def setUp(self):
self.base_path = os.path.join(os.getcwd(), 'mock_downloads_dir')
files.removeall(self.base_path)
rarfile = pyunrar.RarFile(os.path.join(os.getcwd(), 'mock_downloads_dir.rar'))
rarfile.extract(os.getcwd())
settings.DOWNLOADS_FOLDER = self.base_path
settings.FOLDERS_MOVIES = os.path.join(os.getcwd(), 'target_folder', 'movies')
settings.FOLDERS_TV_SHOWS = os.path.join(os.getcwd(), 'target_folder', 'shows')
settings.VERBOSE = True
files.removeall(settings.FOLDERS_MOVIES)
files.removeall(settings.FOLDERS_TV_SHOWS)
def tearDown(self):
files.removeall(self.base_path)
os.rmdir(self.base_path)
def test_remove_watermarks(self):
s = "Toddlers.and.Tiaras.S03E01.Le.Maison.de.Paris.HDTV.XviD-MOMENTUM [NO-RAR] - [ www.torrentday.com ]"
paths = files.find_all_files(os.path.join(self.base_path, s))
path = [p for p in paths][0]
pretty_path = path.get_handler().file.get_pretty_path()
self.failUnless(1 > pretty_path.find("torrentday"), pretty_path)
self.failUnless(1 > pretty_path.lower().find("NO-RAR"))
def test_should_extract_path_parts(self):
paths = files.find_all_files(os.path.join(self.base_path, 'Some.Movie.Archive'))
paths = [p for p in paths]
path = paths[0]
path_parts = path.get_handler().file.get_path_parts()
self.failUnlessEqual(3, len(path_parts), "Was NOT three")
self.failUnlessEqual(
['Some.Random.Movie.Sequel', 'mapsm2.avi', 'Some.Movie.Archive'],
path_parts,
path_parts
)
pretty_path = path.get_handler().file.get_pretty_path()
self.failUnless(pretty_path.startswith(settings.FOLDERS_MOVIES))
movie_folder_name = pretty_path.replace(settings.FOLDERS_MOVIES, '')
self.failUnlessEqual("Some Random Movie Sequel", movie_folder_name.strip('/'))
path = paths[1]
path_parts = path.get_handler().file.get_path_parts()
self.failUnlessEqual(3, len(path_parts), "Was NOT three")
self.failUnlessEqual(
['Some.Random.Movie', 'mapsm.avi', 'Some.Movie.Archive'],
path_parts,
path_parts
)
pretty_path = path.get_handler().file.get_pretty_path()
self.failUnless(pretty_path.startswith(settings.FOLDERS_MOVIES))
movie_folder_name = pretty_path.replace(settings.FOLDERS_MOVIES, '')
self.failUnlessEqual("Some Random Movie", movie_folder_name.strip('/'))
#if path.is_interesting():
# print "Pretty Path", path.get_handler().file.get_pretty_path() | joelbitar/rfinder | tests/extract_clean_name.py | Python | lgpl-3.0 | 3,011 |
"""Config flow for DialogFlow."""
from homeassistant.helpers import config_entry_flow
from .const import DOMAIN
config_entry_flow.register_webhook_flow(
DOMAIN,
"Dialogflow Webhook",
{
"dialogflow_url": "https://dialogflow.com/docs/fulfillment#webhook",
"docs_url": "https://www.home-assistant.io/components/dialogflow/",
},
)
| fbradyirl/home-assistant | homeassistant/components/dialogflow/config_flow.py | Python | apache-2.0 | 361 |
# Exercise
#
# Improve "Word Jumble" so that each word is paired with a hint. The player should be able to see the hint if he or
# she is stuck. The player should be able to see the hint if he or she is stuck. Add scoring system that rewards
# players who solve a jumble without asking for the hint.
#
# Word Jumble
#
# The computer picks a random word and then "jumbles" it.
# The player has to guess the original word.
#
import random
# Create a sequence of words to choose from
WORDS = ("python", "jumble", "easy", "difficult", "answer", "xylophone")
# The hint appears after five unsuccessful attempts
HINTS = ("A programming language.",
"The name of this game.",
"Something that is simple to do.",
"Something that is not simple to do.",
"When someone asks you a question it is polite to...",
"A musical instrument.")
# Pick one word randomly from the sequence
i = random.randrange(len(WORDS))
word = WORDS[i]
hint = HINTS[i]
# Score variable
score = 0
# Create a variable to use later to see if the guess is correct
correct = word
# Create a jumbled version of the word
jumble = ""
while word:
position = random.randrange(len(word))
jumble += word[position]
word = word[:position] + word[(position + 1):]
# Start the game
print(
"""
Welcome to the Word Jumble!
Unscramble the letters to make a word.
(Press the enter key at the prompt to quit.)
"""
)
print("The jumble is: ", jumble)
guess = input("\nYour guess: ")
while guess != correct and guess != "":
score -= 1
print("Sorry, that's not it.")
if score == -5:
score -= 25
print("Do you need help? This is the hint: " + hint)
guess = input("Your guess: ")
if guess == correct:
score += 50
print("That's it! You guessed it!\n")
else:
print("You gave up!\n")
print("Your final score is: " + str(score) + " points. Thanks for playing.")
input("\n\nPress the enter key to exit.") | dmartinezgarcia/Python-Programming | Chapter 4 - For loops, strings and tuples/exercise_3.py | Python | gpl-2.0 | 1,932 |
"""Module to help with parsing and generating configuration files."""
from collections import OrderedDict
# pylint: disable=no-name-in-module
from distutils.version import LooseVersion # pylint: disable=import-error
import logging
import os
import re
import shutil
from typing import Any, Tuple, Optional, Dict, Union, Callable, Sequence, Set
from types import ModuleType
import voluptuous as vol
from voluptuous.humanize import humanize_error
from homeassistant import auth
from homeassistant.auth import (
providers as auth_providers,
mfa_modules as auth_mfa_modules,
)
from homeassistant.const import (
ATTR_FRIENDLY_NAME,
ATTR_HIDDEN,
ATTR_ASSUMED_STATE,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_NAME,
CONF_PACKAGES,
CONF_UNIT_SYSTEM,
CONF_TIME_ZONE,
CONF_ELEVATION,
CONF_UNIT_SYSTEM_IMPERIAL,
CONF_TEMPERATURE_UNIT,
TEMP_CELSIUS,
__version__,
CONF_CUSTOMIZE,
CONF_CUSTOMIZE_DOMAIN,
CONF_CUSTOMIZE_GLOB,
CONF_WHITELIST_EXTERNAL_DIRS,
CONF_AUTH_PROVIDERS,
CONF_AUTH_MFA_MODULES,
CONF_TYPE,
CONF_ID,
)
from homeassistant.core import DOMAIN as CONF_CORE, SOURCE_YAML, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.loader import Integration, IntegrationNotFound
from homeassistant.requirements import (
async_get_integration_with_requirements,
RequirementsNotFound,
)
from homeassistant.util.yaml import load_yaml, SECRET_YAML
from homeassistant.util.package import is_docker_env
import homeassistant.helpers.config_validation as cv
from homeassistant.util.unit_system import IMPERIAL_SYSTEM, METRIC_SYSTEM
from homeassistant.helpers.entity_values import EntityValues
from homeassistant.helpers import config_per_platform, extract_domain_configs
_LOGGER = logging.getLogger(__name__)
DATA_PERSISTENT_ERRORS = "bootstrap_persistent_errors"
RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")
RE_ASCII = re.compile(r"\033\[[^m]*m")
HA_COMPONENT_URL = "[{}](https://home-assistant.io/components/{}/)"
YAML_CONFIG_FILE = "configuration.yaml"
VERSION_FILE = ".HA_VERSION"
CONFIG_DIR_NAME = ".homeassistant"
DATA_CUSTOMIZE = "hass_customize"
FILE_MIGRATION = (("ios.conf", ".ios.conf"),)
DEFAULT_CONFIG = """
# Configure a default setup of Home Assistant (frontend, api, etc)
default_config:
# Uncomment this if you are using SSL/TLS, running in Docker container, etc.
# http:
# base_url: example.duckdns.org:8123
# Text to speech
tts:
- platform: google_translate
group: !include groups.yaml
automation: !include automations.yaml
script: !include scripts.yaml
"""
DEFAULT_SECRETS = """
# Use this file to store secrets like usernames and passwords.
# Learn more at https://home-assistant.io/docs/configuration/secrets/
some_password: welcome
"""
TTS_PRE_92 = """
tts:
- platform: google
"""
TTS_92 = """
tts:
- platform: google_translate
service_name: google_say
"""
def _no_duplicate_auth_provider(
configs: Sequence[Dict[str, Any]]
) -> Sequence[Dict[str, Any]]:
"""No duplicate auth provider config allowed in a list.
Each type of auth provider can only have one config without optional id.
Unique id is required if same type of auth provider used multiple times.
"""
config_keys: Set[Tuple[str, Optional[str]]] = set()
for config in configs:
key = (config[CONF_TYPE], config.get(CONF_ID))
if key in config_keys:
raise vol.Invalid(
"Duplicate auth provider {} found. Please add unique IDs if "
"you want to have the same auth provider twice".format(
config[CONF_TYPE]
)
)
config_keys.add(key)
return configs
def _no_duplicate_auth_mfa_module(
configs: Sequence[Dict[str, Any]]
) -> Sequence[Dict[str, Any]]:
"""No duplicate auth mfa module item allowed in a list.
Each type of mfa module can only have one config without optional id.
A global unique id is required if same type of mfa module used multiple
times.
Note: this is different than auth provider
"""
config_keys: Set[str] = set()
for config in configs:
key = config.get(CONF_ID, config[CONF_TYPE])
if key in config_keys:
raise vol.Invalid(
"Duplicate mfa module {} found. Please add unique IDs if "
"you want to have the same mfa module twice".format(config[CONF_TYPE])
)
config_keys.add(key)
return configs
PACKAGES_CONFIG_SCHEMA = cv.schema_with_slug_keys( # Package names are slugs
vol.Schema({cv.string: vol.Any(dict, list, None)}) # Component config
)
CUSTOMIZE_DICT_SCHEMA = vol.Schema(
{
vol.Optional(ATTR_FRIENDLY_NAME): cv.string,
vol.Optional(ATTR_HIDDEN): cv.boolean,
vol.Optional(ATTR_ASSUMED_STATE): cv.boolean,
},
extra=vol.ALLOW_EXTRA,
)
CUSTOMIZE_CONFIG_SCHEMA = vol.Schema(
{
vol.Optional(CONF_CUSTOMIZE, default={}): vol.Schema(
{cv.entity_id: CUSTOMIZE_DICT_SCHEMA}
),
vol.Optional(CONF_CUSTOMIZE_DOMAIN, default={}): vol.Schema(
{cv.string: CUSTOMIZE_DICT_SCHEMA}
),
vol.Optional(CONF_CUSTOMIZE_GLOB, default={}): vol.Schema(
{cv.string: CUSTOMIZE_DICT_SCHEMA}
),
}
)
CORE_CONFIG_SCHEMA = CUSTOMIZE_CONFIG_SCHEMA.extend(
{
CONF_NAME: vol.Coerce(str),
CONF_LATITUDE: cv.latitude,
CONF_LONGITUDE: cv.longitude,
CONF_ELEVATION: vol.Coerce(int),
vol.Optional(CONF_TEMPERATURE_UNIT): cv.temperature_unit,
CONF_UNIT_SYSTEM: cv.unit_system,
CONF_TIME_ZONE: cv.time_zone,
vol.Optional(CONF_WHITELIST_EXTERNAL_DIRS):
# pylint: disable=no-value-for-parameter
vol.All(cv.ensure_list, [vol.IsDir()]),
vol.Optional(CONF_PACKAGES, default={}): PACKAGES_CONFIG_SCHEMA,
vol.Optional(CONF_AUTH_PROVIDERS): vol.All(
cv.ensure_list,
[
auth_providers.AUTH_PROVIDER_SCHEMA.extend(
{
CONF_TYPE: vol.NotIn(
["insecure_example"],
"The insecure_example auth provider"
" is for testing only.",
)
}
)
],
_no_duplicate_auth_provider,
),
vol.Optional(CONF_AUTH_MFA_MODULES): vol.All(
cv.ensure_list,
[
auth_mfa_modules.MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend(
{
CONF_TYPE: vol.NotIn(
["insecure_example"],
"The insecure_example mfa module" " is for testing only.",
)
}
)
],
_no_duplicate_auth_mfa_module,
),
}
)
def get_default_config_dir() -> str:
"""Put together the default configuration directory based on the OS."""
data_dir = os.getenv("APPDATA") if os.name == "nt" else os.path.expanduser("~")
return os.path.join(data_dir, CONFIG_DIR_NAME) # type: ignore
async def async_ensure_config_exists(
hass: HomeAssistant, config_dir: str
) -> Optional[str]:
"""Ensure a configuration file exists in given configuration directory.
Creating a default one if needed.
Return path to the configuration file.
"""
config_path = find_config_file(config_dir)
if config_path is None:
print("Unable to find configuration. Creating default one in", config_dir)
config_path = await async_create_default_config(hass, config_dir)
return config_path
async def async_create_default_config(
hass: HomeAssistant, config_dir: str
) -> Optional[str]:
"""Create a default configuration file in given configuration directory.
Return path to new config file if success, None if failed.
This method needs to run in an executor.
"""
return await hass.async_add_executor_job(_write_default_config, config_dir)
def _write_default_config(config_dir: str) -> Optional[str]:
"""Write the default config."""
from homeassistant.components.config.group import CONFIG_PATH as GROUP_CONFIG_PATH
from homeassistant.components.config.automation import (
CONFIG_PATH as AUTOMATION_CONFIG_PATH,
)
from homeassistant.components.config.script import CONFIG_PATH as SCRIPT_CONFIG_PATH
config_path = os.path.join(config_dir, YAML_CONFIG_FILE)
secret_path = os.path.join(config_dir, SECRET_YAML)
version_path = os.path.join(config_dir, VERSION_FILE)
group_yaml_path = os.path.join(config_dir, GROUP_CONFIG_PATH)
automation_yaml_path = os.path.join(config_dir, AUTOMATION_CONFIG_PATH)
script_yaml_path = os.path.join(config_dir, SCRIPT_CONFIG_PATH)
# Writing files with YAML does not create the most human readable results
# So we're hard coding a YAML template.
try:
with open(config_path, "wt") as config_file:
config_file.write(DEFAULT_CONFIG)
with open(secret_path, "wt") as secret_file:
secret_file.write(DEFAULT_SECRETS)
with open(version_path, "wt") as version_file:
version_file.write(__version__)
with open(group_yaml_path, "wt"):
pass
with open(automation_yaml_path, "wt") as fil:
fil.write("[]")
with open(script_yaml_path, "wt"):
pass
return config_path
except OSError:
print("Unable to create default configuration file", config_path)
return None
async def async_hass_config_yaml(hass: HomeAssistant) -> Dict:
"""Load YAML from a Home Assistant configuration file.
    This function allows a component inside the asyncio loop to reload its
configuration by itself. Include package merge.
This method is a coroutine.
"""
def _load_hass_yaml_config() -> Dict:
path = find_config_file(hass.config.config_dir)
if path is None:
raise HomeAssistantError(
f"Config file not found in: {hass.config.config_dir}"
)
config = load_yaml_config_file(path)
return config
# Not using async_add_executor_job because this is an internal method.
config = await hass.loop.run_in_executor(None, _load_hass_yaml_config)
core_config = config.get(CONF_CORE, {})
await merge_packages_config(hass, config, core_config.get(CONF_PACKAGES, {}))
return config
def find_config_file(config_dir: Optional[str]) -> Optional[str]:
"""Look in given directory for supported configuration files."""
if config_dir is None:
return None
config_path = os.path.join(config_dir, YAML_CONFIG_FILE)
return config_path if os.path.isfile(config_path) else None
def load_yaml_config_file(config_path: str) -> Dict[Any, Any]:
"""Parse a YAML configuration file.
Raises FileNotFoundError or HomeAssistantError.
This method needs to run in an executor.
"""
conf_dict = load_yaml(config_path)
if not isinstance(conf_dict, dict):
msg = "The configuration file {} does not contain a dictionary".format(
os.path.basename(config_path)
)
_LOGGER.error(msg)
raise HomeAssistantError(msg)
# Convert values to dictionaries if they are None
for key, value in conf_dict.items():
conf_dict[key] = value or {}
return conf_dict
def process_ha_config_upgrade(hass: HomeAssistant) -> None:
"""Upgrade configuration if necessary.
This method needs to run in an executor.
"""
version_path = hass.config.path(VERSION_FILE)
try:
with open(version_path, "rt") as inp:
conf_version = inp.readline().strip()
except FileNotFoundError:
# Last version to not have this file
conf_version = "0.7.7"
if conf_version == __version__:
return
_LOGGER.info(
"Upgrading configuration directory from %s to %s", conf_version, __version__
)
version_obj = LooseVersion(conf_version)
if version_obj < LooseVersion("0.50"):
# 0.50 introduced persistent deps dir.
lib_path = hass.config.path("deps")
if os.path.isdir(lib_path):
shutil.rmtree(lib_path)
if version_obj < LooseVersion("0.92"):
# 0.92 moved google/tts.py to google_translate/tts.py
config_path = find_config_file(hass.config.config_dir)
assert config_path is not None
with open(config_path, "rt", encoding="utf-8") as config_file:
config_raw = config_file.read()
if TTS_PRE_92 in config_raw:
_LOGGER.info("Migrating google tts to google_translate tts")
config_raw = config_raw.replace(TTS_PRE_92, TTS_92)
try:
with open(config_path, "wt", encoding="utf-8") as config_file:
config_file.write(config_raw)
except OSError:
_LOGGER.exception("Migrating to google_translate tts failed")
pass
if version_obj < LooseVersion("0.94") and is_docker_env():
# In 0.94 we no longer install packages inside the deps folder when
# running inside a Docker container.
lib_path = hass.config.path("deps")
if os.path.isdir(lib_path):
shutil.rmtree(lib_path)
with open(version_path, "wt") as outp:
outp.write(__version__)
_LOGGER.debug("Migrating old system configuration files to new locations")
for oldf, newf in FILE_MIGRATION:
if os.path.isfile(hass.config.path(oldf)):
_LOGGER.info("Migrating %s to %s", oldf, newf)
os.rename(hass.config.path(oldf), hass.config.path(newf))
@callback
def async_log_exception(
ex: Exception, domain: str, config: Dict, hass: HomeAssistant
) -> None:
"""Log an error for configuration validation.
This method must be run in the event loop.
"""
if hass is not None:
async_notify_setup_error(hass, domain, True)
_LOGGER.error(_format_config_error(ex, domain, config))
@callback
def _format_config_error(ex: Exception, domain: str, config: Dict) -> str:
"""Generate log exception for configuration validation.
This method must be run in the event loop.
"""
message = f"Invalid config for [{domain}]: "
if isinstance(ex, vol.Invalid):
if "extra keys not allowed" in ex.error_message:
message += (
"[{option}] is an invalid option for [{domain}]. "
"Check: {domain}->{path}.".format(
option=ex.path[-1],
domain=domain,
path="->".join(str(m) for m in ex.path),
)
)
else:
message += "{}.".format(humanize_error(config, ex))
else:
message += str(ex)
try:
domain_config = config.get(domain, config)
except AttributeError:
domain_config = config
message += " (See {}, line {}). ".format(
getattr(domain_config, "__config_file__", "?"),
getattr(domain_config, "__line__", "?"),
)
if domain != CONF_CORE:
message += (
"Please check the docs at "
"https://home-assistant.io/components/{}/".format(domain)
)
return message
async def async_process_ha_core_config(
hass: HomeAssistant,
config: Dict,
api_password: Optional[str] = None,
trusted_networks: Optional[Any] = None,
) -> None:
"""Process the [homeassistant] section from the configuration.
This method is a coroutine.
"""
config = CORE_CONFIG_SCHEMA(config)
# Only load auth during startup.
if not hasattr(hass, "auth"):
auth_conf = config.get(CONF_AUTH_PROVIDERS)
if auth_conf is None:
auth_conf = [{"type": "homeassistant"}]
if api_password:
auth_conf.append(
{"type": "legacy_api_password", "api_password": api_password}
)
if trusted_networks:
auth_conf.append(
{"type": "trusted_networks", "trusted_networks": trusted_networks}
)
mfa_conf = config.get(
CONF_AUTH_MFA_MODULES,
[{"type": "totp", "id": "totp", "name": "Authenticator app"}],
)
setattr(
hass, "auth", await auth.auth_manager_from_config(hass, auth_conf, mfa_conf)
)
await hass.config.async_load()
hac = hass.config
if any(
[
k in config
for k in [
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_NAME,
CONF_ELEVATION,
CONF_TIME_ZONE,
CONF_UNIT_SYSTEM,
]
]
):
hac.config_source = SOURCE_YAML
for key, attr in (
(CONF_LATITUDE, "latitude"),
(CONF_LONGITUDE, "longitude"),
(CONF_NAME, "location_name"),
(CONF_ELEVATION, "elevation"),
):
if key in config:
setattr(hac, attr, config[key])
if CONF_TIME_ZONE in config:
hac.set_time_zone(config[CONF_TIME_ZONE])
# Init whitelist external dir
hac.whitelist_external_dirs = {hass.config.path("www")}
if CONF_WHITELIST_EXTERNAL_DIRS in config:
hac.whitelist_external_dirs.update(set(config[CONF_WHITELIST_EXTERNAL_DIRS]))
# Customize
cust_exact = dict(config[CONF_CUSTOMIZE])
cust_domain = dict(config[CONF_CUSTOMIZE_DOMAIN])
cust_glob = OrderedDict(config[CONF_CUSTOMIZE_GLOB])
for name, pkg in config[CONF_PACKAGES].items():
pkg_cust = pkg.get(CONF_CORE)
if pkg_cust is None:
continue
try:
pkg_cust = CUSTOMIZE_CONFIG_SCHEMA(pkg_cust)
except vol.Invalid:
_LOGGER.warning("Package %s contains invalid customize", name)
continue
cust_exact.update(pkg_cust[CONF_CUSTOMIZE])
cust_domain.update(pkg_cust[CONF_CUSTOMIZE_DOMAIN])
cust_glob.update(pkg_cust[CONF_CUSTOMIZE_GLOB])
hass.data[DATA_CUSTOMIZE] = EntityValues(cust_exact, cust_domain, cust_glob)
if CONF_UNIT_SYSTEM in config:
if config[CONF_UNIT_SYSTEM] == CONF_UNIT_SYSTEM_IMPERIAL:
hac.units = IMPERIAL_SYSTEM
else:
hac.units = METRIC_SYSTEM
elif CONF_TEMPERATURE_UNIT in config:
unit = config[CONF_TEMPERATURE_UNIT]
if unit == TEMP_CELSIUS:
hac.units = METRIC_SYSTEM
else:
hac.units = IMPERIAL_SYSTEM
_LOGGER.warning(
"Found deprecated temperature unit in core "
"configuration expected unit system. Replace '%s: %s' "
"with '%s: %s'",
CONF_TEMPERATURE_UNIT,
unit,
CONF_UNIT_SYSTEM,
hac.units.name,
)
def _log_pkg_error(package: str, component: str, config: Dict, message: str) -> None:
"""Log an error while merging packages."""
message = "Package {} setup failed. Integration {} {}".format(
package, component, message
)
pack_config = config[CONF_CORE][CONF_PACKAGES].get(package, config)
message += " (See {}:{}). ".format(
getattr(pack_config, "__config_file__", "?"),
getattr(pack_config, "__line__", "?"),
)
_LOGGER.error(message)
def _identify_config_schema(module: ModuleType) -> Tuple[Optional[str], Optional[Dict]]:
"""Extract the schema and identify list or dict based."""
try:
schema = module.CONFIG_SCHEMA.schema[module.DOMAIN] # type: ignore
except (AttributeError, KeyError):
return None, None
t_schema = str(schema)
if t_schema.startswith("{") or "schema_with_slug_keys" in t_schema:
return ("dict", schema)
if t_schema.startswith(("[", "All(<function ensure_list")):
return ("list", schema)
return "", schema
def _recursive_merge(conf: Dict[str, Any], package: Dict[str, Any]) -> Union[bool, str]:
"""Merge package into conf, recursively."""
error: Union[bool, str] = False
for key, pack_conf in package.items():
if isinstance(pack_conf, dict):
if not pack_conf:
continue
conf[key] = conf.get(key, OrderedDict())
error = _recursive_merge(conf=conf[key], package=pack_conf)
elif isinstance(pack_conf, list):
conf[key] = cv.remove_falsy(
cv.ensure_list(conf.get(key)) + cv.ensure_list(pack_conf)
)
else:
if conf.get(key) is not None:
return key
conf[key] = pack_conf
return error
async def merge_packages_config(
hass: HomeAssistant,
config: Dict,
packages: Dict[str, Any],
_log_pkg_error: Callable = _log_pkg_error,
) -> Dict:
"""Merge packages into the top-level configuration. Mutate config."""
# pylint: disable=too-many-nested-blocks
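    # Shape of the expected input, as a hedged illustration (not original code):
    #   packages = {"my_pkg": {"light": [...], "sensor": [...]}}
    # each component config found inside a package is folded into the matching
    # top-level key of `config`, via list concatenation or recursive dict merge.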
PACKAGES_CONFIG_SCHEMA(packages)
for pack_name, pack_conf in packages.items():
for comp_name, comp_conf in pack_conf.items():
if comp_name == CONF_CORE:
continue
# If component name is given with a trailing description, remove it
# when looking for component
domain = comp_name.split(" ")[0]
try:
integration = await async_get_integration_with_requirements(
hass, domain
)
component = integration.get_component()
except (IntegrationNotFound, RequirementsNotFound, ImportError) as ex:
_log_pkg_error(pack_name, comp_name, config, str(ex))
continue
merge_list = hasattr(component, "PLATFORM_SCHEMA")
if not merge_list and hasattr(component, "CONFIG_SCHEMA"):
merge_type, _ = _identify_config_schema(component)
merge_list = merge_type == "list"
if merge_list:
config[comp_name] = cv.remove_falsy(
cv.ensure_list(config.get(comp_name)) + cv.ensure_list(comp_conf)
)
continue
if comp_conf is None:
comp_conf = OrderedDict()
if not isinstance(comp_conf, dict):
_log_pkg_error(
pack_name, comp_name, config, "cannot be merged. Expected a dict."
)
continue
if comp_name not in config or config[comp_name] is None:
config[comp_name] = OrderedDict()
if not isinstance(config[comp_name], dict):
_log_pkg_error(
pack_name,
comp_name,
config,
"cannot be merged. Dict expected in main config.",
)
continue
error = _recursive_merge(conf=config[comp_name], package=comp_conf)
if error:
_log_pkg_error(
pack_name, comp_name, config, f"has duplicate key '{error}'"
)
return config
async def async_process_component_config(
hass: HomeAssistant, config: Dict, integration: Integration
) -> Optional[Dict]:
"""Check component configuration and return processed configuration.
Returns None on error.
This method must be run in the event loop.
"""
domain = integration.domain
try:
component = integration.get_component()
except ImportError as ex:
_LOGGER.error("Unable to import %s: %s", domain, ex)
return None
# Check if the integration has a custom config validator
config_validator = None
try:
config_validator = integration.get_platform("config")
except ImportError:
pass
if config_validator is not None and hasattr(
config_validator, "async_validate_config"
):
try:
return await config_validator.async_validate_config( # type: ignore
hass, config
)
except (vol.Invalid, HomeAssistantError) as ex:
async_log_exception(ex, domain, config, hass)
return None
# No custom config validator, proceed with schema validation
if hasattr(component, "CONFIG_SCHEMA"):
try:
return component.CONFIG_SCHEMA(config) # type: ignore
except vol.Invalid as ex:
async_log_exception(ex, domain, config, hass)
return None
component_platform_schema = getattr(
component, "PLATFORM_SCHEMA_BASE", getattr(component, "PLATFORM_SCHEMA", None)
)
if component_platform_schema is None:
return config
platforms = []
for p_name, p_config in config_per_platform(config, domain):
# Validate component specific platform schema
try:
p_validated = component_platform_schema(p_config)
except vol.Invalid as ex:
async_log_exception(ex, domain, p_config, hass)
continue
# Not all platform components follow same pattern for platforms
# So if p_name is None we are not going to validate platform
# (the automation component is one of them)
if p_name is None:
platforms.append(p_validated)
continue
try:
p_integration = await async_get_integration_with_requirements(hass, p_name)
except (RequirementsNotFound, IntegrationNotFound) as ex:
_LOGGER.error("Platform error: %s - %s", domain, ex)
continue
try:
platform = p_integration.get_platform(domain)
except ImportError:
_LOGGER.exception("Platform error: %s", domain)
continue
# Validate platform specific schema
if hasattr(platform, "PLATFORM_SCHEMA"):
# pylint: disable=no-member
try:
p_validated = platform.PLATFORM_SCHEMA( # type: ignore
p_config
)
except vol.Invalid as ex:
async_log_exception(ex, f"{domain}.{p_name}", p_config, hass)
continue
platforms.append(p_validated)
# Create a copy of the configuration with all config for current
# component removed and add validated config back in.
config = config_without_domain(config, domain)
config[domain] = platforms
return config
@callback
def config_without_domain(config: Dict, domain: str) -> Dict:
"""Return a config with all configuration for a domain removed."""
filter_keys = extract_domain_configs(config, domain)
return {key: value for key, value in config.items() if key not in filter_keys}
async def async_check_ha_config_file(hass: HomeAssistant) -> Optional[str]:
"""Check if Home Assistant configuration file is valid.
This method is a coroutine.
"""
import homeassistant.helpers.check_config as check_config
res = await check_config.async_check_ha_config_file(hass)
if not res.errors:
return None
return res.error_str
@callback
def async_notify_setup_error(
hass: HomeAssistant, component: str, display_link: bool = False
) -> None:
"""Print a persistent notification.
This method must be run in the event loop.
"""
from homeassistant.components import persistent_notification
errors = hass.data.get(DATA_PERSISTENT_ERRORS)
if errors is None:
errors = hass.data[DATA_PERSISTENT_ERRORS] = {}
errors[component] = errors.get(component) or display_link
message = "The following components and platforms could not be set up:\n\n"
for name, link in errors.items():
if link:
part = HA_COMPONENT_URL.format(name.replace("_", "-"), name)
else:
part = name
message += f" - {part}\n"
message += "\nPlease check your config."
persistent_notification.async_create(
hass, message, "Invalid config", "invalid_config"
)
| Cinntax/home-assistant | homeassistant/config.py | Python | apache-2.0 | 28,081 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import torch
import tqdm
from torchsde import BaseBrownian, BaseSDE, sdeint
from torchsde.settings import SDE_TYPES
from torchsde.types import Tensor, Vector, Scalar, Tuple, Optional, Callable
from . import utils
sys.setrecursionlimit(5000)
@torch.no_grad()
def inspect_samples(y0: Tensor,
ts: Vector,
dt: Scalar,
sde: BaseSDE,
bm: BaseBrownian,
img_dir: str,
methods: Tuple[str, ...],
options: Optional[Tuple] = None,
labels: Optional[Tuple[str, ...]] = None,
vis_dim=0,
dt_true: Optional[float] = 2 ** -14):
if options is None:
options = (None,) * len(methods)
if labels is None:
labels = methods
solns = [
sdeint(sde, y0, ts, bm, method=method, dt=dt, options=options_)
for method, options_ in zip(methods, options)
]
method_for_true = 'euler' if sde.sde_type == SDE_TYPES.ito else 'midpoint'
true = sdeint(sde, y0, ts, bm, method=method_for_true, dt=dt_true)
labels += ('true',)
solns += [true]
# (T, batch_size, d) -> (T, batch_size) -> (batch_size, T).
solns = [soln[..., vis_dim].t() for soln in solns]
for i, samples in enumerate(zip(*solns)):
utils.swiss_knife_plotter(
img_path=os.path.join(img_dir, f'{i}'),
plots=[
{'x': ts, 'y': sample, 'label': label, 'marker': 'x'}
for sample, label in zip(samples, labels)
]
)
@torch.no_grad()
def inspect_orders(y0: Tensor,
t0: Scalar,
t1: Scalar,
dts: Vector,
sde: BaseSDE,
bm: BaseBrownian,
img_dir: str,
methods: Tuple[str, ...],
options: Optional[Tuple] = None,
labels: Optional[Tuple[str, ...]] = None,
dt_true: Optional[float] = 2 ** -14,
test_func: Optional[Callable] = lambda x: (x ** 2).flatten(start_dim=1).sum(dim=1)):
if options is None:
options = (None,) * len(methods)
if labels is None:
labels = methods
ts = torch.tensor([t0, t1], device=y0.device)
solns = [
[
sdeint(sde, y0, ts, bm, method=method, dt=dt, options=options_)[-1]
for method, options_ in zip(methods, options)
]
for dt in tqdm.tqdm(dts)
]
if hasattr(sde, 'analytical_sample'):
true = sde.analytical_sample(y0, ts, bm)[-1]
else:
method_for_true = 'euler' if sde.sde_type == SDE_TYPES.ito else 'midpoint'
true = sdeint(sde, y0, ts, bm, method=method_for_true, dt=dt_true)[-1]
mses = []
maes = []
for dt, solns_ in zip(dts, solns):
mses_for_dt = [utils.mse(soln, true) for soln in solns_]
mses.append(mses_for_dt)
maes_for_dt = [utils.mae(soln, true, test_func) for soln in solns_]
maes.append(maes_for_dt)
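    # Added explanatory note (not original code): regressing log(error) on
    # log(dt) estimates the empirical convergence order; the 0.5 factor below
    # turns the MSE slope into a strong-order estimate, since log(RMSE) is
    # 0.5 * log(MSE).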
strong_order_slopes = [
utils.linregress_slope(utils.log(dts), .5 * utils.log(mses_for_method))
for mses_for_method in zip(*mses)
]
weak_order_slopes = [
utils.linregress_slope(utils.log(dts), utils.log(maes_for_method))
for maes_for_method in zip(*maes)
]
utils.swiss_knife_plotter(
img_path=os.path.join(img_dir, 'strong_order'),
plots=[
{'x': dts, 'y': mses_for_method, 'label': f'{label}(k={slope:.4f})', 'marker': 'x'}
for mses_for_method, label, slope in zip(zip(*mses), labels, strong_order_slopes)
],
options={'xscale': 'log', 'yscale': 'log', 'cycle_line_style': True}
)
utils.swiss_knife_plotter(
img_path=os.path.join(img_dir, 'weak_order'),
plots=[
{'x': dts, 'y': mres_for_method, 'label': f'{label}(k={slope:.4f})', 'marker': 'x'}
for mres_for_method, label, slope in zip(zip(*maes), labels, weak_order_slopes)
],
options={'xscale': 'log', 'yscale': 'log', 'cycle_line_style': True}
)
| google-research/torchsde | diagnostics/inspection.py | Python | apache-2.0 | 4,759 |
from django.conf.urls import patterns, url
from quenta.storybook import views
urlpatterns = patterns('',
url(r'^(?P<story_id>\d+/)', views.story, name='story'),#+ parametar
url(r'^(?P<story_id>\d+/html/)', views.story_html, name='story_html'),#+ parametar
url(r'^$', views.stories_list, name='stories_list'),
)
| metakermit/quenta | quenta/storybook/urls.py | Python | agpl-3.0 | 325 |
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Provides an HTML cleaner function with sqalchemy compatible API"""
from HTMLParser import HTMLParser
import bleach
# Set up custom tags/attributes for bleach
BLEACH_TAGS = [
'caption', 'strong', 'em', 'b', 'i', 'p', 'code', 'pre', 'tt', 'samp',
'kbd', 'var', 'sub', 'sup', 'dfn', 'cite', 'big', 'small', 'address',
'hr', 'br', 'div', 'span', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'ul',
'ol', 'li', 'dl', 'dt', 'dd', 'abbr', 'acronym', 'a', 'img',
'blockquote', 'del', 'ins', 'table', 'tbody', 'tr', 'td', 'th',
] + bleach.ALLOWED_TAGS
BLEACH_ATTRS = {}
ATTRS = [
'href', 'src', 'width', 'height', 'alt', 'cite', 'datetime',
'title', 'class', 'name', 'xml:lang', 'abbr'
]
for tag in BLEACH_TAGS:
BLEACH_ATTRS[tag] = ATTRS
def cleaner(dummy, value, *_):
"""Cleans out unsafe HTML tags.
Uses bleach and unescape until it reaches a fix point.
Args:
dummy: unused, sqalchemy will pass in the model class
value: html (string) to be cleaned
Returns:
Html (string) without unsafe tags.
"""
# Some cases like Request don't use the title value
# and it's nullable, so check for that
if value is None:
return value
parser = HTMLParser()
value = unicode(value)
while True:
lastvalue = value
value = parser.unescape(
bleach.clean(value, BLEACH_TAGS, BLEACH_ATTRS, strip=True)
)
if value == lastvalue:
break
return value
| edofic/ggrc-core | src/ggrc/utils/html_cleaner.py | Python | apache-2.0 | 1,534 |
# Copyright (c) 2004-2007 Divmod.
# See LICENSE for details.
"""
Tests for L{nevow.json}.
"""
from zope.interface import implements
from nevow.inevow import IAthenaTransportable
from nevow import json, rend, page, loaders, tags, athena, testutil
from twisted.trial import unittest
TEST_OBJECTS = [
0,
None,
True,
False,
[],
[0],
[0, 1, 2],
[None, 1, 2],
[None, u'one', 2],
[True, False, u'string', 10],
[[1, 2], [3, 4]],
[[1.5, 2.5], [3.5, 4.5]],
[0, [1, 2], [u'hello'], [u'world'], [True, None, False]],
{},
{u'foo': u'bar'},
{u'foo': None},
{u'bar': True},
{u'baz': [1, 2, 3]},
{u'quux': {u'bar': u'foo'}},
]
TEST_STRINGLIKE_OBJECTS = [
u'',
u'string',
u'string with "embedded" quotes',
u"string with 'embedded' single-quotes",
u'string with \\"escaped embedded\\" quotes',
u"string with \\'escaped embedded\\' single-quotes",
u"string with backslashes\\\\",
u"string with trailing accented vowels: \xe1\xe9\xed\xf3\xfa\xfd\xff",
u"string with trailing control characters: \f\b\n\t\r",
u'string with high codepoint characters: \u0111\u2222\u3333\u4444\uffff',
u'string with very high codepoint characters: \U00011111\U00022222\U00033333\U00044444\U000fffff',
]
class DummyLivePage(object):
"""
Stand-in for L{athena.LivePage} which implements only enough of its
behavior so that a L{LiveFragment} or L{LiveElement} can be set as its
child and flattened.
"""
localCounter = 0
def __init__(self):
self.page = self
self.liveFragmentChildren = []
self._jsDepsMemo = {}
self._cssDepsMemo = {}
self._didConnect = True
def addLocalObject(self, obj):
"""
Return an Athena ID for the given object. Returns a new value on every
call.
"""
self.localCounter += 1
return self.localCounter
def _shouldInclude(self, module):
"""
Stub module-system method. Always declares that the given module
should not be included.
"""
return False
class JavascriptObjectNotationTestCase(unittest.TestCase):
def testSerialize(self):
for struct in TEST_OBJECTS:
json.serialize(struct)
def testRoundtrip(self):
for struct in TEST_OBJECTS:
bytes = json.serialize(struct)
unstruct = json.parse(bytes)
self.assertEquals(
unstruct, struct,
"Failed to roundtrip %r: %r (through %r)" % (
struct, unstruct, bytes))
    def testStringlikeRoundtrip(self):
for struct in TEST_STRINGLIKE_OBJECTS:
bytes = json.serialize(struct)
unstruct = json.parse(bytes)
failMsg = "Failed to roundtrip %r: %r (through %r)" % (
struct, unstruct, bytes)
self.assertEquals(unstruct, struct, failMsg)
self.assert_(isinstance(unstruct, unicode), failMsg)
def test_lineTerminators(self):
"""
When passed a unicode string containing a line terminator,
L{json.serialize} emits an escape sequence representing that character
(not a UTF-8 sequence directly representing that the line terminator
code point).
Literal line terminators are allowed in JSON, but some parsers do not
handle them properly.
"""
# These are the four line terminators currently in Unicode.
self.assertEqual('"\\r"', json.serialize(u"\r"))
self.assertEqual('"\\n"', json.serialize(u"\n"))
self.assertEqual('"\\u2028"', json.serialize(u"\u2028"))
self.assertEqual('"\\u2029"', json.serialize(u"\u2029"))
def testScientificNotation(self):
self.assertEquals(json.parse('1e10'), 10**10)
self.assertEquals(json.parse('1e0'), 1)
def testHexEscapedCodepoints(self):
self.assertEquals(
json.parse('"\\xe1\\xe9\\xed\\xf3\\xfa\\xfd"'),
u"\xe1\xe9\xed\xf3\xfa\xfd")
def testEscapedControls(self):
self.assertEquals(
json.parse('"\\f\\b\\n\\t\\r"'),
u"\f\b\n\t\r")
def _rendererTest(self, cls):
self.assertEquals(
json.serialize(
cls(
docFactory=loaders.stan(tags.p['Hello, world.']))),
'"<div xmlns=\\"http://www.w3.org/1999/xhtml\\"><p>Hello, world.</p></div>"')
def test_fragmentSerialization(self):
"""
Test that instances of L{nevow.rend.Fragment} serialize as an xhtml
string.
"""
return self._rendererTest(rend.Fragment)
def test_elementSerialization(self):
"""
Test that instances of L{nevow.page.Element} serialize as an xhtml
string.
"""
return self._rendererTest(page.Element)
def _doubleSerialization(self, cls):
fragment = cls(docFactory=loaders.stan(tags.div['Hello']))
self.assertEqual(
json.serialize(fragment),
json.serialize(fragment))
def test_doubleFragmentSerialization(self):
"""
Test that repeatedly calling L{json.serialize} with an instance of
L{rend.Fragment} results in the same result each time.
"""
return self._doubleSerialization(rend.Fragment)
def test_doubleElementSerialization(self):
"""
Like L{test_doubleElementSerialization} but for L{page.Element}
instances.
"""
return self._doubleSerialization(page.Element)
def _doubleLiveSerialization(self, cls, renderer):
livePage = DummyLivePage()
liveFragment = cls(
docFactory=loaders.stan(
[tags.div(render=tags.directive(renderer))['Hello'],
tags.div(render=tags.directive('foo'))]))
liveFragment.setFragmentParent(livePage)
self.assertEqual(
json.serialize(liveFragment),
json.serialize(liveFragment))
def test_doubleLiveFragmentSerialization(self):
"""
Like L{test_doubleFragmentSerialization} but for L{athena.LiveFragment}
instances.
"""
class AnyLiveFragment(athena.LiveFragment):
"""
Just some L{LiveFragment} subclass, such as an application might
define.
"""
def render_foo(self, ctx, data):
return ctx.tag
self._doubleLiveSerialization(AnyLiveFragment, 'liveFragment')
def test_doubleLiveElementSerialization(self):
"""
Like L{test_doubleFragmentSerialization} but for L{athena.LiveElement}
instances.
"""
requests = []
class AnyLiveElement(athena.LiveElement):
"""
Just some L{LiveElement} subclass, such as an application might
define.
"""
def foo(self, request, tag):
requests.append(request)
return tag
page.renderer(foo)
self._doubleLiveSerialization(AnyLiveElement, 'liveElement')
self.assertTrue(isinstance(requests[0], testutil.FakeRequest))
def test_unsupportedSerialization(self):
"""
L{json.serialize} should raise a L{TypeError} if it is passed an object
which it does not know how to serialize.
"""
class Unsupported(object):
def __repr__(self):
return 'an unsupported object'
exception = self.assertRaises(TypeError, json.serialize, Unsupported())
self.assertEqual(
str(exception),
"Unsupported type <class 'nevow.test.test_json.Unsupported'>: "
"an unsupported object")
def test_customSerialization(self):
"""
L{json.serialize} should emit JavaScript calls to the JavaScript object
named by L{IAthenaTransportable.jsClass} with the arguments returned by
L{IAthenaTransportable.getInitialArguments} when passed an object which
can be adapted to L{IAthenaTransportable}.
"""
class Transportable(object):
"""
Completely parameterized L{IAthenaTransportable} implementation so
different data can be easily tested.
"""
implements(IAthenaTransportable)
def __init__(self, jsClass, initialArgs):
self.jsClass = jsClass
self.getInitialArguments = lambda: initialArgs
self.assertEqual(
json.serialize(Transportable(u"Foo", ())),
"(new Foo())")
self.assertEqual(
json.serialize(Transportable(u"Bar", (None,))),
"(new Bar(null))")
self.assertEqual(
json.serialize(Transportable(u"Baz.Quux", (1, 2))),
"(new Baz.Quux(1,2))")
# The style of the quotes in this assertion is basically irrelevant.
# If, for some reason, the serializer changes to use ' instead of ",
# there's no reason not to change this test to reflect that. -exarkun
self.assertEqual(
json.serialize(Transportable(u"Quux", (u"Foo",))),
'(new Quux("Foo"))')
| UstadMobile/exelearning-ustadmobile-work | nevow/test/test_json.py | Python | gpl-2.0 | 9,209 |
# Copyright 2020 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING, Optional, Union, Sequence
from typing_extensions import Protocol
from cirq import ops
from cirq._doc import doc_private
from cirq.type_workarounds import NotImplementedType
if TYPE_CHECKING:
import cirq
class SupportsActOn(Protocol):
"""An object that explicitly specifies how to act on simulator states."""
@doc_private
def _act_on_(self, args: 'cirq.OperationTarget') -> Union[NotImplementedType, bool]:
"""Applies an action to the given argument, if it is a supported type.
For example, unitary operations can implement an `_act_on_` method that
checks if `isinstance(args, cirq.ActOnStateVectorArgs)` and, if so,
apply their unitary effect to the state vector.
The global `cirq.act_on` method looks for whether or not the given
argument has this value, before attempting any fallback strategies
specified by the argument being acted on.
This should only be implemented on `Operation` subclasses. Others such
as gates should use `SupportsActOnQubits`.
Args:
args: An object of unspecified type. The method must check if this
object is of a recognized type and act on it if so.
Returns:
True: The receiving object (`self`) acted on the argument.
NotImplemented: The receiving object did not act on the argument.
All other return values are considered to be errors.
"""
class SupportsActOnQubits(Protocol):
"""An object that explicitly specifies how to act on specific qubits."""
@doc_private
def _act_on_(
self,
args: 'cirq.OperationTarget',
qubits: Sequence['cirq.Qid'],
) -> Union[NotImplementedType, bool]:
"""Applies an action to the given argument, if it is a supported type.
For example, unitary operations can implement an `_act_on_` method that
checks if `isinstance(args, cirq.ActOnStateVectorArgs)` and, if so,
apply their unitary effect to the state vector.
The global `cirq.act_on` method looks for whether or not the given
argument has this value, before attempting any fallback strategies
specified by the argument being acted on.
If implementing this on an `Operation`, use `SupportsActOn` instead.
Args:
args: An object of unspecified type. The method must check if this
object is of a recognized type and act on it if so.
qubits: The sequence of qubits to use when applying the action.
Returns:
True: The receiving object (`self`) acted on the argument.
NotImplemented: The receiving object did not act on the argument.
All other return values are considered to be errors.
"""
def act_on(
action: Union['cirq.Operation', 'cirq.Gate'],
args: 'cirq.OperationTarget',
    qubits: Optional[Sequence['cirq.Qid']] = None,
*,
allow_decompose: bool = True,
):
"""Applies an action to a state argument.
For example, the action may be a `cirq.Operation` and the state argument may
represent the internal state of a state vector simulator (a
`cirq.ActOnStateVectorArgs`).
For non-operations, the `qubits` argument must be explicitly supplied.
The action is applied by first checking if `action._act_on_` exists and
returns `True` (instead of `NotImplemented`) for the given object. Then
fallback strategies specified by the state argument via `_act_on_fallback_`
are attempted. If those also fail, the method fails with a `TypeError`.
Args:
action: The operation, gate, or other to apply to the state tensor.
args: A mutable state object that should be modified by the action. May
specify an `_act_on_fallback_` method to use in case the action
doesn't recognize it.
qubits: The sequence of qubits to use when applying the action.
allow_decompose: Defaults to True. Forwarded into the
`_act_on_fallback_` method of `args`. Determines if decomposition
should be used or avoided when attempting to act `action` on `args`.
Used by internal methods to avoid redundant decompositions.
Returns:
Nothing. Results are communicated by editing `args`.
Raises:
ValueError: If called on an operation and supplied qubits, if not called
on an operation and no qubits are supplied, or if `_act_on_` or
`_act_on_fallback_` returned something other than `True` or
`NotImplemented`.
TypeError: Failed to act `action` on `args`.
"""
is_op = isinstance(action, ops.Operation)
if is_op and qubits is not None:
raise ValueError('Calls to act_on should not supply qubits if the action is an Operation.')
if not is_op and qubits is None:
raise ValueError('Calls to act_on should supply qubits if the action is not an Operation.')
action_act_on = getattr(action, '_act_on_', None)
if action_act_on is not None:
result = action_act_on(args) if is_op else action_act_on(args, qubits)
if result is True:
return
if result is not NotImplemented:
raise ValueError(
f'_act_on_ must return True or NotImplemented but got '
f'{result!r} from {action!r}._act_on_'
)
arg_fallback = getattr(args, '_act_on_fallback_', None)
if arg_fallback is not None:
qubits = action.qubits if isinstance(action, ops.Operation) else qubits
result = arg_fallback(action, qubits=qubits, allow_decompose=allow_decompose)
if result is True:
return
if result is not NotImplemented:
raise ValueError(
f'_act_on_fallback_ must return True or NotImplemented but got '
f'{result!r} from {type(args)}._act_on_fallback_'
)
raise TypeError(
"Failed to act action on state argument.\n"
"Tried both action._act_on_ and args._act_on_fallback_.\n"
"\n"
f"State argument type: {type(args)}\n"
f"Action type: {type(action)}\n"
f"Action repr: {action!r}\n"
)
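# ---------------------------------------------------------------------------
# Hedged usage sketch (editor addition, not part of the upstream module). It
# exercises only the dispatch contract documented above; both classes are
# hypothetical stand-ins, not real cirq gate or simulator-state types.
if __name__ == '__main__':
    class _CountingArgs:
        """Minimal state object that counts how often the fallback is used."""
        def _act_on_fallback_(self, action, qubits, allow_decompose=True):
            self.calls = getattr(self, 'calls', 0) + 1
            return True
    class _MarkerGate:
        """Gate-like object whose _act_on_ defers to the fallback path."""
        def _act_on_(self, args, qubits):
            return NotImplemented
    _args = _CountingArgs()
    # _act_on_ returns NotImplemented, so act_on falls through to the fallback.
    act_on(_MarkerGate(), _args, qubits=())
    assert _args.calls == 1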
| quantumlib/Cirq | cirq-core/cirq/protocols/act_on_protocol.py | Python | apache-2.0 | 6,823 |
from selenium.common.exceptions import NoSuchElementException, TimeoutException
class DomHelper(object):
driver = None
waiter = None
def open_page(self, url):
self.driver.get(url)
def reload_page(self):
self.driver.refresh()
def print_el(self, element):
print 'tag: ' + element.tag_name + ' id: ' + element.get_attribute('id') + ' class: ' + element.get_attribute('class') + ' text: ' + element.text
def get_el(self, selector):
if isinstance(selector, (str, unicode)):
return self.driver.find_element_by_css_selector(selector)
else:
return selector
def get_els(self, selector):
if isinstance(selector, (str, unicode)):
return self.driver.find_elements_by_css_selector(selector)
else:
return selector
def get_child_el(self, parent, selector):
try:
return parent.find_element_by_css_selector(selector)
except NoSuchElementException:
return None
def get_child_els(self, parent, selector):
return parent.find_elements_by_css_selector(selector)
def is_el_present(self, selector):
try:
self.driver.find_element_by_css_selector(selector)
return True
except NoSuchElementException:
return False
def verify_el_present(self, selector):
if not self.is_el_present(selector):
raise Exception('Element %s not found' % selector)
def is_el_visible(self, selector):
return self.get_el(selector).is_displayed()
def click_button(self, selector):
if self.driver.name == 'iPhone':
self.driver.execute_script('$("%s").trigger("tap")' % (selector))
else:
self.get_el(selector).click()
def enter_text_field(self, selector, text):
text_field = self.get_el(selector)
text_field.clear()
text_field.send_keys(text)
def select_checkbox(self, selector, name, deselect=False):
found_checkbox = False
checkboxes = self.get_els(selector)
for checkbox in checkboxes:
if checkbox.get_attribute('name') == name:
found_checkbox = True
if not deselect and not checkbox.is_selected():
checkbox.click()
if deselect and checkbox.is_selected():
checkbox.click()
if not found_checkbox:
raise Exception('Checkbox %s not found.' % (name))
def select_option(self, selector, value):
found_option = False
options = self.get_els(selector)
for option in options:
if option.get_attribute('value') == str(value):
found_option = True
option.click()
if not found_option:
raise Exception('Option %s not found' % (value))
def get_selected_option(self, selector):
options = self.get_els(selector)
for option in options:
if option.is_selected():
return option.get_attribute('value')
def is_option_selected(self, selector, value):
options = self.get_els(selector)
for option in options:
if option.is_selected() != (value == option.get_attribute('value')):
print option.get_attribute('value')
return False
return True
def is_text_equal(self, selector, text):
return self.get_el(selector).text == text
def verify_inputs_checked(self, selector, checked):
checkboxes = self.get_els(selector)
for checkbox in checkboxes:
name = checkbox.get_attribute('name')
if checkbox.is_selected() != (name in checked):
                raise Exception("Input isn't checked as expected - %s" % (name))
def verify_option_selected(self, selector, value):
if not self.is_option_selected(selector, value):
            raise Exception("Option isn't selected as expected")
def verify_radio_value(self, selector, value):
value = str(value)
radios = self.get_els(selector)
for radio in radios:
radio_value = radio.get_attribute('value')
if radio.is_selected() and radio_value != value:
                raise Exception("Radio with value %s is checked and shouldn't be" % radio_value)
            elif not radio.is_selected() and radio_value == value:
                raise Exception("Radio with value %s isn't checked and should be" % radio_value)
def verify_text_field(self, selector, text):
text_field = self.get_el(selector)
value = text_field.get_attribute('value')
if value != text:
raise Exception('Text field contains %s, not %s' % (value, text))
def verify_text_value(self, selector, value):
text_field = self.get_el(selector)
if text_field.get_attribute('value') != value:
raise Exception('Value of %s not equal to "%s" - instead saw "%s"' % (selector, value, text_field.get_attribute('value')))
def verify_text_of_el(self, selector, text):
if not self.is_text_equal(selector, text):
raise Exception('Text of %s not equal to "%s" - instead saw "%s"' % (selector, text, self.get_el(selector).text))
def verify_text_in_els(self, selector, text):
els = self.get_els(selector)
found_text = False
for el in els:
if text in el.text:
found_text = True
if not found_text:
            raise Exception("Didn't find text: %s" % (text))
def verify_text_not_in_els(self, selector, text):
els = self.get_els(selector)
found_text = False
for el in els:
if text in el.text:
found_text = True
if found_text:
raise Exception('Found text: %s' % (text))
def is_button_enabled(self, selector):
return (self.get_el(selector).get_attribute('disabled') == 'false')
def check_title(self, title):
return self.driver.title == title or self.driver.title == 'eatdifferent.com: ' + title
def wait_for(self, condition):
self.waiter.until(lambda driver: condition())
def check_num(self, selector, num):
els = self.get_els(selector)
return len(els) == num
def wait_for_num_els(self, selector, num):
try:
self.waiter.until(lambda driver: self.check_num(selector, num))
except TimeoutException:
raise Exception('Never saw %s number of els for %s' % (num, selector))
def wait_for_visible(self, selector):
try:
self.waiter.until(lambda driver: self.is_el_visible(selector))
except TimeoutException:
raise Exception('Never saw element %s become visible' % (selector))
def wait_for_hidden(self, selector):
try:
self.waiter.until(lambda driver: not self.is_el_visible(selector))
except TimeoutException:
raise Exception('Never saw element %s become hidden' % (selector))
def wait_for_button(self, selector):
try:
self.waiter.until(lambda driver: self.is_button_enabled(selector))
except TimeoutException:
raise Exception('Never saw button %s enabled' % (selector))
def wait_for_text(self, selector, text):
try:
self.waiter.until(lambda driver: self.is_text_equal(selector, text))
except TimeoutException:
raise Exception('Never saw text %s for %s' % (text, selector))
def wait_for_el(self, selector):
try:
self.waiter.until(lambda driver: self.is_el_present(selector))
except TimeoutException:
raise Exception('Never saw element %s' % (selector))
def wait_for_title(self, title):
try:
self.waiter.until(lambda driver: self.check_title(title))
except TimeoutException:
raise Exception('Never saw title change to %s' % (title))
def __init__(self, driver, waiter):
self.driver = driver
self.waiter = waiter | fergalmoran/dss | spa/tests/dom_helper.py | Python | bsd-2-clause | 8,159 |
# -*- coding: utf-8 -*-
from openerp.osv import fields, orm
import sys
class DummyModel(orm.Model):
# expect "no access rules" error
_name = 'dummy.model'
_columns = {
'name': fields.char('Dummy', size=100),
}
# printout non-ASCII text to check unicode issues
# (from Monty Python's "Italian Lesson"...)
sys.stdout.write("Eeeeeee! Milano è tanto meglio di Napoli. "
"Milano è la citta la più bella di tutti .. nel mondo...\n")
| MeetSCS/test_repo_2 | tests/test_repo/test_module/dummy_model.py | Python | agpl-3.0 | 476 |
#!/usr/bin/env python
#############################################################################
#
# Copyright (C) 2015 The Qt Company Ltd.
# Contact: http://www.qt.io/licensing/
#
# This file is part of the QtWebEngine module of the Qt Toolkit.
#
# $QT_BEGIN_LICENSE:LGPL$
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and The Qt Company. For licensing terms
# and conditions see http://www.qt.io/terms-conditions. For further
# information use the contact form at http://www.qt.io/contact-us.
#
# GNU Lesser General Public License Usage
# Alternatively, this file may be used under the terms of the GNU Lesser
# General Public License version 2.1 as published by the Free Software
# Foundation and appearing in the file LICENSE.LGPL included in the
# packaging of this file. Please review the following information to
# ensure the GNU Lesser General Public License version 2.1 requirements
# will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
#
# As a special exception, The Qt Company gives you certain additional
# rights. These rights are described in The Qt Company LGPL Exception
# version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
#
# GNU General Public License Usage
# Alternatively, this file may be used under the terms of the GNU
# General Public License version 3.0 as published by the Free Software
# Foundation and appearing in the file LICENSE.GPL included in the
# packaging of this file. Please review the following information to
# ensure the GNU General Public License version 3.0 requirements will be
# met: http://www.gnu.org/copyleft/gpl.html.
#
#
# $QT_END_LICENSE$
#
#############################################################################
import glob
import os
import subprocess
import sys
import string
import argparse
qtwebengine_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
import git_submodule as GitSubmodule
import version_resolver as resolver
chromium_src = os.environ.get('CHROMIUM_SRC_DIR')
ninja_src = os.path.join(qtwebengine_root, 'src/3rdparty_upstream/ninja')
use_external_chromium = False
parser = argparse.ArgumentParser(description='Initialize QtWebEngine repository.')
parser.add_argument('--baseline-upstream', action='store_true', help='initialize using upstream Chromium submodule w/o applying patches (for maintenance purposes only)')
group = parser.add_mutually_exclusive_group()
group.add_argument('-u', '--upstream', action='store_true', help='initialize using upstream Chromium submodule')
group.add_argument('-s', '--snapshot', action='store_true', help='initialize using flat Chromium snapshot submodule (default)')
args = parser.parse_args()
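# Hedged usage sketch (editor addition): typical invocations, assuming the
# script is run from the root of a QtWebEngine checkout.
#   python tools/scripts/init-repository.py              # flat snapshot submodule (default)
#   python tools/scripts/init-repository.py --upstream   # upstream Chromium submodule
#   CHROMIUM_SRC_DIR=/path/to/chromium python tools/scripts/init-repository.py -u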
if args.baseline_upstream:
args.upstream = True
if chromium_src:
chromium_src = os.path.abspath(chromium_src)
use_external_chromium = True
if not chromium_src or not os.path.isdir(chromium_src):
if args.upstream:
chromium_src = os.path.join(qtwebengine_root, 'src/3rdparty_upstream/chromium')
if args.snapshot or not chromium_src:
chromium_src = os.path.join(qtwebengine_root, 'src/3rdparty/chromium')
ninja_src = os.path.join(qtwebengine_root, 'src/3rdparty/ninja')
args.snapshot = True
    print 'CHROMIUM_SRC_DIR not set, using Chromium in ' + chromium_src
if not args.baseline_upstream:
# Write our chromium sources directory into git config.
relative_chromium_src = os.path.relpath(chromium_src, qtwebengine_root)
subprocess.call(['git', 'config', 'qtwebengine.chromiumsrcdir', relative_chromium_src])
def updateLastChange():
if use_external_chromium:
return
currentDir = os.getcwd()
os.chdir(chromium_src)
print 'updating LASTCHANGE files'
subprocess.call(['python', 'build/util/lastchange.py', '-o', 'build/util/LASTCHANGE'])
subprocess.call(['python', 'build/util/lastchange.py', '-s', 'third_party/WebKit', '-o', 'build/util/LASTCHANGE.blink'])
os.chdir(currentDir)
def initUpstreamSubmodules():
ninja_url = 'https://github.com/martine/ninja.git'
chromium_url = 'https://chromium.googlesource.com/chromium/src.git'
ninja_shasum = 'refs/tags/' + resolver.currentNinjaVersion()
chromium_ref = 'refs/tags/' + resolver.currentVersion()
os.chdir(qtwebengine_root)
current_submodules = subprocess.check_output(['git', 'submodule'])
if not 'src/3rdparty_upstream/ninja' in current_submodules:
subprocess.call(['git', 'submodule', 'add', ninja_url, 'src/3rdparty_upstream/ninja'])
if not use_external_chromium and not 'src/3rdparty_upstream/chromium' in current_submodules:
subprocess.call(['git', 'submodule', 'add', chromium_url, 'src/3rdparty_upstream/chromium'])
ninjaSubmodule = GitSubmodule.Submodule()
ninjaSubmodule.path = 'src/3rdparty_upstream/ninja'
ninjaSubmodule.ref = ninja_shasum
ninjaSubmodule.url = ninja_url
ninjaSubmodule.os = 'all'
ninjaSubmodule.initialize()
if not use_external_chromium:
chromiumSubmodule = GitSubmodule.Submodule()
chromiumSubmodule.path = 'src/3rdparty_upstream/chromium'
chromiumSubmodule.ref = chromium_ref
chromiumSubmodule.url = chromium_url
chromiumSubmodule.os = 'all'
chromiumSubmodule.initialize()
chromiumSubmodule.initSubmodules()
# Unstage repositories so we do not accidentally commit them.
subprocess.call(['git', 'reset', '-q', 'HEAD', 'src/3rdparty_upstream/ninja'])
subprocess.call(['git', 'reset', '-q', 'HEAD', 'src/3rdparty_upstream/chromium'])
def initSnapshot():
snapshot = GitSubmodule.Submodule()
snapshot.path = 'src/3rdparty'
snapshot.os = 'all'
snapshot.initialize()
os.chdir(qtwebengine_root)
if args.upstream:
initUpstreamSubmodules()
updateLastChange()
if not args.baseline_upstream and not use_external_chromium:
subprocess.call(['python', os.path.join(qtwebengine_root, 'tools', 'scripts', 'patch_upstream.py')])
if args.snapshot:
initSnapshot()
| Tofee/qtwebengine | tools/scripts/init-repository.py | Python | gpl-2.0 | 6,221 |
"""
Based on initial work from django-debug-toolbar
"""
import re
from datetime import datetime
try:
from django.db import connections
except ImportError:
# Django version < 1.2
from django.db import connection
connections = {'default': connection}
try:
from django.db.backends import utils # renamed in django 1.7
except ImportError:
from django.db.backends import util as utils # removed in django 1.9
from django.conf import settings as django_settings
#from django.template import Node
from devserver.modules import DevServerModule
#from devserver.utils.stack import tidy_stacktrace, get_template_info
from devserver.utils.time import ms_from_timedelta
from devserver import settings
try:
import sqlparse
except ImportError:
class sqlparse:
@staticmethod
def format(text, *args, **kwargs):
return text
_sql_fields_re = re.compile(r'SELECT .*? FROM')
_sql_aggregates_re = re.compile(r'SELECT .*?(COUNT|SUM|AVERAGE|MIN|MAX).*? FROM')
def truncate_sql(sql, aggregates=True):
if not aggregates and _sql_aggregates_re.match(sql):
return sql
return _sql_fields_re.sub('SELECT ... FROM', sql)
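# Hedged illustration (editor addition) of the truncation behaviour above:
# >>> truncate_sql("SELECT id, name FROM auth_user WHERE id = 1")
# 'SELECT ... FROM auth_user WHERE id = 1'
# >>> truncate_sql("SELECT COUNT(*) FROM auth_user", aggregates=False)
# 'SELECT COUNT(*) FROM auth_user'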
# # TODO:This should be set in the toolbar loader as a default and panels should
# # get a copy of the toolbar object with access to its config dictionary
# SQL_WARNING_THRESHOLD = getattr(settings, 'DEVSERVER_CONFIG', {}) \
# .get('SQL_WARNING_THRESHOLD', 500)
try:
from debug_toolbar.panels.sql import DatabaseStatTracker
debug_toolbar = True
except ImportError:
debug_toolbar = False
import django
version = float('.'.join([str(x) for x in django.VERSION[:2]]))
if version >= 1.6:
DatabaseStatTracker = utils.CursorWrapper
else:
DatabaseStatTracker = utils.CursorDebugWrapper
class DatabaseStatTracker(DatabaseStatTracker):
"""
Replacement for CursorDebugWrapper which outputs information as it happens.
"""
logger = None
def execute(self, sql, params=()):
formatted_sql = sql % (params if isinstance(params, dict) else tuple(params))
if self.logger:
message = formatted_sql
if settings.DEVSERVER_FILTER_SQL:
if any(filter_.search(message) for filter_ in settings.DEVSERVER_FILTER_SQL):
message = None
if message is not None:
if settings.DEVSERVER_TRUNCATE_SQL:
message = truncate_sql(message, aggregates=settings.DEVSERVER_TRUNCATE_AGGREGATES)
message = sqlparse.format(message, reindent=True, keyword_case='upper')
self.logger.debug(message)
start = datetime.now()
try:
return super(DatabaseStatTracker, self).execute(sql, params)
finally:
stop = datetime.now()
duration = ms_from_timedelta(stop - start)
if self.logger and (not settings.DEVSERVER_SQL_MIN_DURATION
or duration > settings.DEVSERVER_SQL_MIN_DURATION):
if self.cursor.rowcount >= 0 and message is not None:
self.logger.debug('Found %s matching rows', self.cursor.rowcount, duration=duration)
if not (debug_toolbar or django_settings.DEBUG):
self.db.queries.append({
'sql': formatted_sql,
'time': duration,
})
def executemany(self, sql, param_list):
start = datetime.now()
try:
return super(DatabaseStatTracker, self).executemany(sql, param_list)
finally:
stop = datetime.now()
duration = ms_from_timedelta(stop - start)
if self.logger:
message = sqlparse.format(sql, reindent=True, keyword_case='upper')
message = 'Executed %s times\n' % message
self.logger.debug(message, duration=duration)
self.logger.debug('Found %s matching rows', self.cursor.rowcount, duration=duration, id='query')
            if not (debug_toolbar or django_settings.DEBUG):
self.db.queries.append({
'sql': '%s times: %s' % (len(param_list), sql),
'time': duration,
})
class SQLRealTimeModule(DevServerModule):
"""
Outputs SQL queries as they happen.
"""
logger_name = 'sql'
def process_init(self, request):
if not issubclass(utils.CursorDebugWrapper, DatabaseStatTracker):
self.old_cursor = utils.CursorDebugWrapper
utils.CursorDebugWrapper = DatabaseStatTracker
DatabaseStatTracker.logger = self.logger
def process_complete(self, request):
if issubclass(utils.CursorDebugWrapper, DatabaseStatTracker):
utils.CursorDebugWrapper = self.old_cursor
class SQLSummaryModule(DevServerModule):
"""
    Outputs a summary of SQL queries.
"""
logger_name = 'sql'
def process_complete(self, request):
queries = [
q for alias in connections
for q in connections[alias].queries
]
num_queries = len(queries)
if num_queries:
unique = set([s['sql'] for s in queries])
self.logger.info('%(calls)s queries with %(dupes)s duplicates' % dict(
calls=num_queries,
dupes=num_queries - len(unique),
), duration=sum(float(c.get('time', 0)) for c in queries) * 1000)
| chriscauley/django-devserver | devserver/modules/sql.py | Python | bsd-3-clause | 5,470 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 University of Dundee & Open Microscopy Environment
# All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from scc.git import GitRepository
import pytest
from Mock import MoxTestBase
import logging
import subprocess
class MockGitRepository(GitRepository):
def __init__(self, gh, path):
self.log = logging.getLogger("test.test_gitrepository")
self.dbg = self.log.debug
self.info = self.log.info
self.debugWrap = None
self.infoWrap = None
self.errorWrap = None
self.gh = gh
self.path = path
def __del__(self):
pass
class MockPopen(object):
class MockIO(object):
def __init__(self):
self.n_close = 0
def close(self):
self.n_close += 1
def __init__(self, rcode, retout, reterr):
self.stdout = self.MockIO()
self.stderr = self.MockIO()
self.returncode = rcode
self.retout = retout
self.reterr = reterr
self.n_wait = 0
def communicate(self):
return self.retout, self.reterr
def wait(self):
self.n_wait += 1
return self.returncode
class TestGitRepository(MoxTestBase):
def setup_popen(self, rcode, stderr, stdout):
repo = MockGitRepository(None, '.')
self.mox.StubOutWithMock(subprocess, 'Popen')
p = MockPopen(rcode, 'out', 'err')
subprocess.Popen(
('cmd', 'a', 'b'), stdout=stdout, stderr=stderr).AndReturn(p)
return repo, p
@pytest.mark.parametrize('no_wait', [True, False])
@pytest.mark.parametrize('return_stderr', [True, False])
def test_communicate(self, tmpdir, no_wait, return_stderr):
repo, p = self.setup_popen(0, subprocess.PIPE, subprocess.PIPE)
self.mox.ReplayAll()
r = repo.communicate(
'cmd', 'a', 'b', no_wait=no_wait, return_stderr=return_stderr)
if return_stderr:
assert r == ('out', 'err')
else:
assert r == 'out'
assert p.stdout.n_close == 1
assert p.stderr.n_close == 1
assert p.n_wait == 0
def test_communicate_fail(self):
repo, p = self.setup_popen(1, subprocess.PIPE, subprocess.PIPE)
self.mox.ReplayAll()
with pytest.raises(Exception) as exc_info:
repo.communicate('cmd', 'a', 'b')
assert exc_info.value.message.startswith('Failed to run ')
assert p.stdout.n_close == 1
assert p.stderr.n_close == 1
assert p.n_wait == 0
@pytest.mark.parametrize('no_wait', [True, False])
def test_call_no_wait(self, no_wait):
repo, p = self.setup_popen(0, None, None)
self.mox.ReplayAll()
r = repo.call_no_wait('cmd', 'a', 'b', no_wait=no_wait)
assert r == p
assert p.stdout.n_close == 0
assert p.stderr.n_close == 0
assert p.n_wait == 0
@pytest.mark.parametrize('no_wait', [True, False])
def test_call(self, no_wait):
repo, p = self.setup_popen(0, None, None)
self.mox.ReplayAll()
r = repo.call_no_wait('cmd', 'a', 'b', no_wait=no_wait)
assert r == p
assert p.stdout.n_close == 0
assert p.stderr.n_close == 0
assert p.n_wait == 0 if no_wait else 1
def test_call_fail(self):
repo, p = self.setup_popen(1, None, None)
self.mox.ReplayAll()
with pytest.raises(Exception) as exc_info:
repo.call('cmd', 'a', 'b')
assert exc_info.value.message == 'rc=1'
assert p.stdout.n_close == 0
assert p.stderr.n_close == 0
assert p.n_wait == 1
| snoopycrimecop/snoopycrimecop | test/unit/test_gitrepository.py | Python | gpl-2.0 | 4,349 |
import json
class InfrastructureUpgradeLevel(object):
name = ''
upgrade_cost = 0
no_of_turns_required = 0
transaction_benefit = 0
def __init__(
self,
name='',
upgrade_cost=0,
no_of_turns_required=0,
transaction_benefit=0
):
self.name = name
self.upgrade_cost = upgrade_cost
self.no_of_turns_required = no_of_turns_required
self.transaction_benefit = transaction_benefit
def __repr__(self):
return json.dumps(self.to_dict())
def to_dict(self):
return {
'Name': self.name,
'UpgradeCost': self.upgrade_cost,
'NoOfTurnsRequired': self.no_of_turns_required,
'TransactionBenefit': self.transaction_benefit
}
@staticmethod
def json_factory(input_json):
result = InfrastructureUpgradeLevel()
result.name = input_json['Name']
result.upgrade_cost = input_json['UpgradeCost']
result.no_of_turns_required = input_json['NoOfTurnsRequired']
result.transaction_benefit = input_json['TransactionBenefit']
return result
| travisby/kronos2 | hermes_api/base/infrastructure_upgrade_level.py | Python | bsd-2-clause | 1,143 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', unique=True, max_length=30, verbose_name='username', validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username.', 'invalid')])),
('first_name', models.CharField(max_length=30, verbose_name='first name', blank=True)),
('last_name', models.CharField(max_length=30, verbose_name='last name', blank=True)),
('email', models.EmailField(max_length=75, verbose_name='email address', blank=True)),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('is_bot', models.BooleanField(default=False, editable=False)),
('groups', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of his/her group.', verbose_name='groups')),
('user_permissions', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Permission', blank=True, help_text='Specific permissions for this user.', verbose_name='user permissions')),
],
options={
'abstract': False,
'verbose_name': 'user',
'verbose_name_plural': 'users',
},
bases=(models.Model,),
),
]
| JoepDriesen/Rikker | authentication/migrations/0001_initial.py | Python | gpl-2.0 | 2,775 |
# -*- coding: utf-8 -*-
# Scrapy settings for crawler project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
# http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
# http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'movie_crawler'
SPIDER_MODULES = ['crawler.spiders']
NEWSPIDER_MODULE = 'crawler.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS=32
# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY=3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN=16
#CONCURRENT_REQUESTS_PER_IP=16
# Disable cookies (enabled by default)
COOKIES_ENABLED=False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED=False
# Override the default request headers:
DEFAULT_REQUEST_HEADERS = {
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate, sdch, br',
'Accept-Language': 'zh-CN,zh;q=0.8,zh-TW;q=0.6,en;q=0.4',
'Cache-Control': 'max-age=0',
}
# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'crawler.middlewares.MyCustomSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'crawler.middlewares.MyCustomDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
# 'crawler.pipelines.SomePipeline': 300,
#}
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
# NOTE: AutoThrottle will honour the standard settings for concurrency and delay
#AUTOTHROTTLE_ENABLED=True
# The initial download delay
#AUTOTHROTTLE_START_DELAY=5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY=60
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG=False
# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED=True
#HTTPCACHE_EXPIRATION_SECS=0
#HTTPCACHE_DIR='httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES=[]
#HTTPCACHE_STORAGE='scrapy.extensions.httpcache.FilesystemCacheStorage'
| ryerh/py-crawler | crawler/settings.py | Python | apache-2.0 | 3,200 |
import imp
import os
from DIRAC import gConfig, gLogger
from DIRAC.Core.Utilities.CFG import CFG
from DIRAC.ConfigurationSystem.Client.Helpers import CSGlobals
BASECS = "WebApp"
def loadWebAppCFGFiles():
"""
Load WebApp/web.cfg definitions
"""
exts = []
for ext in CSGlobals.getCSExtensions():
if ext == "DIRAC":
continue
if ext[-5:] != "DIRAC":
ext = "%sDIRAC" % ext
if ext != "WebAppDIRAC":
exts.append( ext )
exts.append( "DIRAC" )
exts.append( "WebAppDIRAC" )
webCFG = CFG()
for modName in reversed( exts ):
try:
modPath = imp.find_module( modName )[1]
except ImportError:
continue
gLogger.verbose( "Found module %s at %s" % ( modName, modPath ) )
cfgPath = os.path.join( modPath, "WebApp", "web.cfg" )
if not os.path.isfile( cfgPath ):
gLogger.verbose( "Inexistant %s" % cfgPath )
continue
try:
modCFG = CFG().loadFromFile( cfgPath )
except Exception, excp:
gLogger.error( "Could not load %s: %s" % ( cfgPath, excp ) )
continue
gLogger.verbose( "Loaded %s" % cfgPath )
expl = [ BASECS ]
while len( expl ):
current = expl.pop( 0 )
if not modCFG.isSection( current ):
continue
if modCFG.getOption( "%s/AbsoluteDefinition" % current, False ):
gLogger.verbose( "%s:%s is an absolute definition" % ( modName, current ) )
try:
webCFG.deleteKey( current )
except:
pass
modCFG.deleteKey( "%s/AbsoluteDefinition" % current )
else:
for sec in modCFG[ current ].listSections():
expl.append( "%s/%s" % ( current, sec ) )
#Add the modCFG
webCFG = webCFG.mergeWith( modCFG )
gConfig.loadCFG( webCFG )
def getRawSchema():
"""
Load the schema from the CS
"""
base = "%s/Schema" % ( BASECS )
schema = []
explore = [ ( "", schema ) ]
while len( explore ):
parentName, parentData = explore.pop( 0 )
fullName = "%s/%s" % ( base, parentName )
result = gConfig.getSections( fullName )
if not result[ 'OK' ]:
continue
sectionsList = result[ 'Value' ]
for sName in sectionsList:
sData = []
parentData.append( ( "%s/%s" % ( parentName, sName ), sData ) )
explore.append( ( sName, sData ) )
result = gConfig.getOptions( fullName )
if not result[ 'OK' ]:
continue
optionsList = result[ 'Value' ]
for opName in optionsList:
opVal = gConfig.getValue( "%s/%s" % ( fullName, opName ) )
if opVal.find( "link|" ) == 0:
parentData.append( ( "link", opName, opVal[5:] ) )
else:
parentData.append( ( "app", opName, opVal ) )
return schema
| zmathe/WebAppDIRAC | Core/ConfMgr.py | Python | gpl-3.0 | 2,674 |
import sys
import argparse
def parseArgument():
# Parse the input
parser=argparse.ArgumentParser(description=\
"Make a script that will convert bedgraph to bigwig files")
parser.add_argument("--fileNamePrefixList", required=True,\
help='Name of file with a list of bedgraph file prefixes that will also be used for bigwigs, should not end with _')
parser.add_argument("--chromSizes", required=False,\
default="/mnt/data/annotations/by_organism/human/hg19.GRCh37/hg19.chrom.sizes", \
help='Name of file with the chromosome lengths')
parser.add_argument("--scriptFileName", required=True,\
help='Name of file where the script will be recorded')
options = parser.parse_args()
return options
def makebedGraphToBigWigScript(options):
# Make a script that will convert bedgraph to bigwig files
fileNamePrefixListFile = open(options.fileNamePrefixList)
scriptFile = open(options.scriptFileName, 'w+')
for line in fileNamePrefixListFile:
# Iterate through the file name prefixes and make a line in the script for each prefix, base, sequence/RC combination
fileNamePrefix = line.strip()
scriptFile.write("bedGraphToBigWig " + fileNamePrefix + "_deepLIFTScoresA_sorted.bedgraph " + options.chromSizes + " " + fileNamePrefix + "_deepLIFTScoresA_sorted.bw" + "\n")
scriptFile.write("bedGraphToBigWig " + fileNamePrefix + "_deepLIFTScoresC_sorted.bedgraph " + options.chromSizes + " " + fileNamePrefix + "_deepLIFTScoresC_sorted.bw" + "\n")
scriptFile.write("bedGraphToBigWig " + fileNamePrefix + "_deepLIFTScoresG_sorted.bedgraph " + options.chromSizes + " " + fileNamePrefix + "_deepLIFTScoresG_sorted.bw" + "\n")
scriptFile.write("bedGraphToBigWig " + fileNamePrefix + "_deepLIFTScoresT_sorted.bedgraph " + options.chromSizes + " " + fileNamePrefix + "_deepLIFTScoresT_sorted.bw" + "\n")
scriptFile.write("bedGraphToBigWig " + fileNamePrefix + "_deepLIFTScoresA_RC_sorted.bedgraph " + options.chromSizes + " " + fileNamePrefix + "_deepLIFTScoresA_RC_sorted.bw" + "\n")
scriptFile.write("bedGraphToBigWig " + fileNamePrefix + "_deepLIFTScoresC_RC_sorted.bedgraph " + options.chromSizes + " " + fileNamePrefix + "_deepLIFTScoresC_RC_sorted.bw" + "\n")
scriptFile.write("bedGraphToBigWig " + fileNamePrefix + "_deepLIFTScoresG_RC_sorted.bedgraph " + options.chromSizes + " " + fileNamePrefix + "_deepLIFTScoresG_RC_sorted.bw" + "\n")
scriptFile.write("bedGraphToBigWig " + fileNamePrefix + "_deepLIFTScoresT_RC_sorted.bedgraph " + options.chromSizes + " " + fileNamePrefix + "_deepLIFTScoresT_RC_sorted.bw" + "\n")
fileNamePrefixListFile.close()
scriptFile.close()
if __name__ == "__main__":
options = parseArgument()
makebedGraphToBigWigScript(options)
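# Hedged usage sketch (editor addition): example invocation, assuming a file
# "prefixes.txt" that lists one bedgraph file-name prefix per line.
#   python makebedGraphToBigWigScript.py --fileNamePrefixList prefixes.txt \
#       --scriptFileName convertToBigWig.sh
#   bash convertToBigWig.sh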
| imk1/IMKTFBindingCode | makebedGraphToBigWigScript.py | Python | mit | 2,695 |
#!/usr/bin/env python
from gevent import pywsgi
from geventwebsocket.handler import WebSocketHandler
import socketio
import time
from bridge import Bridge
from conf import conf
sio = socketio.Server(async_mode='gevent')
msgs = []
dbw_enable = False
@sio.on('connect')
def connect(sid, environ):
print("connect ", sid)
def send(topic, data):
sio.emit(topic, data=data, skip_sid=True)
bridge = Bridge(conf, send)
@sio.on('telemetry')
def telemetry(sid, data):
global dbw_enable
if data["dbw_enable"] != dbw_enable:
dbw_enable = data["dbw_enable"]
bridge.publish_dbw_status(dbw_enable)
bridge.publish_odometry(data)
@sio.on('control')
def control(sid, data):
bridge.publish_controls(data)
@sio.on('obstacle')
def obstacle(sid, data):
bridge.publish_obstacles(data)
@sio.on('lidar')
def lidar(sid, data):
bridge.publish_lidar(data)
@sio.on('trafficlights')
def trafficlights(sid, data):
bridge.publish_traffic(data)
@sio.on('image')
def image(sid, data):
bridge.publish_camera(data)
if __name__ == '__main__':
# Create socketio WSGI application
app = socketio.WSGIApp(sio)
# deploy as an gevent WSGI server
pywsgi.WSGIServer(('', 4567), app, handler_class=WebSocketHandler).serve_forever() | vsingla2/Self-Driving-Car-NanoDegree-Udacity | Term3-Path-Planning-Concentrations-and-Systems/Project3-System-Integration-Capstone/ros/src/styx/server.py | Python | mit | 1,277 |
# Copyright (C) 2009 Stijn Cole
# Copyright (C) 2010-2011 Richard Lincoln
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pylab import close, figure, xlabel, ylabel, hold, plot, axis, legend
from pydyn.Pdoption import Pdoption
from pydyn.rundyn import rundyn
def Test1():
""" test script """
pdopt = Pdoption()
pdopt[4] = 0 # no progress info
pdopt[5] = 1 # no plots
## Modified Euler
# Set options
pdopt[0] = 1 # Modified Euler
# Run dynamic simulation
print '> Modified Euler...'
Angles1, Speeds, Eq_tr, Ed_tr, Efd, PM, Voltages, Stepsize1, Errest, Time1 = \
rundyn('casestagg', 'casestaggdyn', 'staggevent', pdopt)
## Runge-Kutta
# Set options
pdopt[0] = 2 # Runge-Kutta
# Run dynamic simulation
print 'Done.\n> Runge-Kutta...'
Angles2, Speeds, Eq_tr, Ed_tr, Efd, PM, Voltages, Stepsize2, Errest, Time2 = \
rundyn('casestagg', 'casestaggdyn', 'staggevent', pdopt)
## Fehlberg
# Set options
pdopt[0] = 3 # Runge-Kutta Fehlberg
pdopt[1] = 1e-4 # tol = 1e-4
pdopt[2] = 1e-4 # minimum step size = 1e-4
# Run dynamic simulation
print 'Done.\n> Runge-Kutta Fehlberg...'
Angles3, Speeds, Eq_tr, Ed_tr, Efd, PM, Voltages, Stepsize3, Errest, Time3 = \
rundyn('casestagg', 'casestaggdyn', 'staggevent', pdopt)
## Higham-Hall
# Set options
pdopt[0] = 4 # Runge-Kutta Higham-Hall
pdopt[1] = 1e-4 # tol = 1e-4
pdopt[2] = 1e-4 # minimum step size = 1e-4
# Run dynamic simulation
print 'Done.\n> Runge-Kutta Higham-Hall...'
Angles4, Speeds, Eq_tr, Ed_tr, Efd, PM, Voltages, Stepsize4, Errest, Time4 = \
rundyn('casestagg', 'casestaggdyn', 'staggevent', pdopt)
print 'Done.\n'
## Plots
# Plot angles
close("all")
    figure()
    hold(True)
    xlabel('Time [s]')
    ylabel('Generator angles [deg]')
    plot(Time1, Angles1[:, :1], '-.b', label='Modified Euler')
    plot(Time2, Angles2[:, :1], ':r', label='Runge-Kutta')
    plot(Time3, Angles3[:, :1], '--g', label='Fehlberg')
    plot(Time4, Angles4[:, :1], 'm', label='Higham-Hall')
    legend()
    # Fix the x-range to the simulated interval; leave the y-axis autoscaled
    axis(xmin=0, xmax=Time1[-1])
    # Plot step sizes
    figure()
    hold(True)
    plot(Time1, Stepsize1, ':b', label='Modified Euler and Runge-Kutta')
    plot(Time3, Stepsize3, '--g', label='Fehlberg')
    plot(Time4, Stepsize4, 'm', label='Higham-Hall')
    xlabel('Time [s]')
    ylabel('Step size')
    legend()
    axis(xmin=0, xmax=Time1[-1])
| rwl/PyDyn | pydyn/Test1.py | Python | apache-2.0 | 4,002 |
# Copyright 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from unittest import mock
from odoo.exceptions import UserError
from odoo.tests.common import TransactionCase
model = "odoo.addons.base_report_to_printer.models.printing_server"
class StopTest(Exception):
pass
class TestPrintingPrinterWizard(TransactionCase):
def setUp(self):
super().setUp()
self.Model = self.env["printing.printer.update.wizard"]
self.server = self.env["printing.server"].create({})
self.printer_vals = {
"printer-info": "Info",
"printer-make-and-model": "Make and Model",
"printer-location": "location",
"device-uri": "URI",
"printer-uri-supported": "uri",
}
def _record_vals(self, sys_name="sys_name"):
return {
"name": self.printer_vals["printer-info"],
"server_id": self.server.id,
"system_name": sys_name,
"model": self.printer_vals["printer-make-and-model"],
"location": self.printer_vals["printer-location"],
"uri": self.printer_vals["device-uri"],
}
@mock.patch("%s.cups" % model)
def test_action_ok_inits_connection(self, cups):
""" It should initialize CUPS connection """
self.Model.action_ok()
cups.Connection.assert_called_once_with(
host=self.server.address, port=self.server.port
)
@mock.patch("%s.cups" % model)
def test_action_ok_gets_printers(self, cups):
""" It should get printers from CUPS """
cups.Connection().getPrinters.return_value = {"sys_name": self.printer_vals}
cups.Connection().getPPD3.return_value = (200, 0, "")
self.Model.action_ok()
cups.Connection().getPrinters.assert_called_once_with()
@mock.patch("%s.cups" % model)
def test_action_ok_raises_warning_on_error(self, cups):
""" It should raise Warning on any error """
cups.Connection.side_effect = StopTest
with self.assertRaises(UserError):
self.Model.action_ok()
@mock.patch("%s.cups" % model)
def test_action_ok_creates_new_printer(self, cups):
""" It should create new printer w/ proper vals """
cups.Connection().getPrinters.return_value = {"sys_name": self.printer_vals}
cups.Connection().getPPD3.return_value = (200, 0, "")
self.Model.action_ok()
rec_id = self.env["printing.printer"].search(
[("system_name", "=", "sys_name")], limit=1
)
self.assertTrue(rec_id)
for key, val in self._record_vals().items():
if rec_id._fields[key].type == "many2one":
val = self.env[rec_id._fields[key].comodel_name].browse(val)
self.assertEqual(val, rec_id[key])
@mock.patch("%s.cups" % model)
def test_action_ok_skips_existing_printer(self, cups):
""" It should not recreate existing printers """
cups.Connection().getPrinters.return_value = {"sys_name": self.printer_vals}
cups.Connection().getPPD3.return_value = (200, 0, "")
self.env["printing.printer"].create(self._record_vals())
self.Model.action_ok()
res_ids = self.env["printing.printer"].search(
[("system_name", "=", "sys_name")]
)
self.assertEqual(1, len(res_ids))
| OCA/report-print-send | base_report_to_printer/tests/test_printing_printer_wizard.py | Python | agpl-3.0 | 3,391 |
#! /usr/bin/env python
# encoding: utf-8
import io
from setuptools import setup, find_packages
NAME = "AnyQt"
VERSION = "0.1.0"
AUTHOR = "Aleš Erjavec"
AUTHOR_EMAIL = "ales.erjavec@fri.uni-lj.si"
URL = "https://github.com/ales-erjavec/anyqt"
PACKAGES = find_packages(".", include="AnyQt*")
DESCRIPTION = "PyQt5/PyQt6 compatibility layer."
with io.open("README.txt", encoding="utf-8") as f:
README = f.read()
LICENSE = "GPLv3"
CLASSIFIERS = [
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
]
KEYWORDS = [
"GUI", "PyQt4", "PyQt5", "PySide", "PySide2", "compatibility"
]
PROJECT_URLS = {
"Bug Reports": "https://github.com/ales-erjavec/anyqt/issues",
"Source": URL,
"Documentation": "https://anyqt.readthedocs.io/en/stable/"
}
if __name__ == "__main__":
setup(
name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=README,
license=LICENSE,
keywords=KEYWORDS,
classifiers=CLASSIFIERS,
packages=PACKAGES,
project_urls=PROJECT_URLS,
python_requires=">=3.6",
)
| ales-erjavec/anyqt | setup.py | Python | gpl-3.0 | 1,397 |
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from sickbeard import logger
from sickbeard import tvcache
from sickrage.providers.NZBProvider import NZBProvider
class WombleProvider(NZBProvider):
def __init__(self):
NZBProvider.__init__(self, "Womble's Index")
self.public = True
self.cache = WombleCache(self)
self.urls = {'base_url': 'http://newshost.co.za/'}
self.url = self.urls['base_url']
self.supports_backlog = False
class WombleCache(tvcache.TVCache):
def __init__(self, provider_obj):
tvcache.TVCache.__init__(self, provider_obj)
# only poll Womble's Index every 15 minutes max
self.minTime = 15
def updateCache(self):
# check if we should update
if not self.shouldUpdate():
return
# clear cache
self._clearCache()
# set updated
self.setLastUpdate()
cl = []
for url in [self.provider.url + 'rss/?sec=tv-x264&fr=false',
self.provider.url + 'rss/?sec=tv-sd&fr=false',
self.provider.url + 'rss/?sec=tv-dvd&fr=false',
self.provider.url + 'rss/?sec=tv-hd&fr=false']:
logger.log(u"Cache update URL: %s" % url, logger.DEBUG)
for item in self.getRSSFeed(url)['entries'] or []:
ci = self._parseItem(item)
if ci is not None:
cl.append(ci)
if len(cl) > 0:
myDB = self._getDB()
myDB.mass_action(cl)
def _checkAuth(self, data):
return data if data['feed'] and data['feed']['title'] != 'Invalid Link' else None
provider = WombleProvider()
| Thraxis/SickRage | sickbeard/providers/womble.py | Python | gpl-3.0 | 2,381 |
import sys
from models import Company, Tech, Location, setup_db
from parser import parse_directory
def load_company(session, company_data):
company = Company(
name=company_data.get('name'),
website=company_data.get('website'),
description=company_data.get('description'),
size=company_data.get('size'),
remote=company_data.get('allows_remote')
)
company.techs = [Tech(name=tech_name) for tech_name in company_data.get('stack', [])]
company.location = Location(
city=company_data['location'].get('city'),
country=company_data['location'].get('country'),
postcode=company_data['location'].get('postcode')
)
session.add(company)
session.commit()
if __name__ == "__main__":
db_url = 'postgresql://testing:password@localhost/ourstack'
companies_path = '../companies'
# allows specifying the db url when calling the script directly
if len(sys.argv) > 1:
db_url = sys.argv[1]
session = setup_db(db_url)
for company_data in parse_directory(companies_path):
load_company(session, company_data)
for instance in session.query(Company):
print(instance.name)
| Keats/ourstack | bootstrap/main.py | Python | mit | 1,201 |