| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6–947k | stringlengths 5–100 | stringlengths 4–231 | stringclasses 1 value | stringclasses 15 values | int64 6–947k | float64 0–0.34 |
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Utility for creating well-formed pull request merges and pushing them to Apache. This script is a modified version
# of the one created by the Spark project (https://github.com/apache/spark/blob/master/dev/merge_spark_pr.py).
#
# Usage: ./kafka-merge-pr.py (see config env vars below)
#
# This utility assumes you already have a local Kafka git clone and that you
# have added remotes corresponding to both:
# (i) the GitHub Apache Kafka mirror and
# (ii) the Apache Kafka git repo.
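#
# A minimal sketch of the assumed remote setup (the remote names and URLs below
# are illustrative examples, not mandated by this script):
#
#   git remote add apache-github https://github.com/apache/kafka.git
#   git remote add apache https://gitbox.apache.org/repos/asf/kafka.git
#
# Whatever names you choose must match PR_REMOTE_NAME and PUSH_REMOTE_NAME (see
# the configuration variables below).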
import json
import os
import re
import subprocess
import sys
import urllib2
try:
import jira.client
JIRA_IMPORTED = True
except ImportError:
JIRA_IMPORTED = False
PROJECT_NAME = "kafka"
CAPITALIZED_PROJECT_NAME = "kafka".upper()
# Location of the local git repository
REPO_HOME = os.environ.get("%s_HOME" % CAPITALIZED_PROJECT_NAME, os.getcwd())
# Remote name which points to the GitHub site
PR_REMOTE_NAME = os.environ.get("PR_REMOTE_NAME", "apache-github")
# Remote name where we want to push the changes to (GitHub by default, but Apache Git would work if GitHub is down)
PUSH_REMOTE_NAME = os.environ.get("PUSH_REMOTE_NAME", "apache-github")
# ASF JIRA username
JIRA_USERNAME = os.environ.get("JIRA_USERNAME", "")
# ASF JIRA password
JIRA_PASSWORD = os.environ.get("JIRA_PASSWORD", "")
# OAuth key used for issuing requests against the GitHub API. If this is not defined, then requests
# will be unauthenticated. You should only need to configure this if you find yourself regularly
# exceeding your IP's unauthenticated request rate limit. You can create an OAuth key at
# https://github.com/settings/tokens. This script only requires the "public_repo" scope.
GITHUB_OAUTH_KEY = os.environ.get("GITHUB_OAUTH_KEY")
GITHUB_USER = os.environ.get("GITHUB_USER", "apache")
GITHUB_BASE = "https://github.com/%s/%s/pull" % (GITHUB_USER, PROJECT_NAME)
GITHUB_API_BASE = "https://api.github.com/repos/%s/%s" % (GITHUB_USER, PROJECT_NAME)
JIRA_BASE = "https://issues.apache.org/jira/browse"
JIRA_API_BASE = "https://issues.apache.org/jira"
# Prefix added to temporary branches
TEMP_BRANCH_PREFIX = "PR_TOOL"
DEV_BRANCH_NAME = "trunk"
DEFAULT_FIX_VERSION = os.environ.get("DEFAULT_FIX_VERSION", "2.3.0")
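# A hypothetical shell setup for the configuration variables above (the values
# are examples only, not defaults required by this script):
#
#   export KAFKA_HOME=~/src/kafka
#   export PR_REMOTE_NAME=apache-github PUSH_REMOTE_NAME=apache
#   export JIRA_USERNAME=jdoe JIRA_PASSWORD=secret
#   export GITHUB_OAUTH_KEY=<token with "public_repo" scope>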
def get_json(url):
try:
request = urllib2.Request(url)
if GITHUB_OAUTH_KEY:
request.add_header('Authorization', 'token %s' % GITHUB_OAUTH_KEY)
return json.load(urllib2.urlopen(request))
except urllib2.HTTPError as e:
if "X-RateLimit-Remaining" in e.headers and e.headers["X-RateLimit-Remaining"] == '0':
print "Exceeded the GitHub API rate limit; see the instructions in " + \
"kafka-merge-pr.py to configure an OAuth token for making authenticated " + \
"GitHub requests."
else:
print "Unable to fetch URL, exiting: %s" % url
sys.exit(-1)
def fail(msg):
print msg
clean_up()
sys.exit(-1)
def run_cmd(cmd):
print cmd
if isinstance(cmd, list):
return subprocess.check_output(cmd)
else:
return subprocess.check_output(cmd.split(" "))
def continue_maybe(prompt):
result = raw_input("\n%s (y/n): " % prompt)
if result.lower() != "y":
fail("Okay, exiting")
def clean_up():
if original_head != get_current_branch():
print "Restoring head pointer to %s" % original_head
run_cmd("git checkout %s" % original_head)
branches = run_cmd("git branch").replace(" ", "").split("\n")
for branch in filter(lambda x: x.startswith(TEMP_BRANCH_PREFIX), branches):
print "Deleting local branch %s" % branch
run_cmd("git branch -D %s" % branch)
def get_current_branch():
return run_cmd("git rev-parse --abbrev-ref HEAD").replace("\n", "")
# merge the requested PR and return the merge hash
def merge_pr(pr_num, target_ref, title, body, pr_repo_desc):
pr_branch_name = "%s_MERGE_PR_%s" % (TEMP_BRANCH_PREFIX, pr_num)
target_branch_name = "%s_MERGE_PR_%s_%s" % (TEMP_BRANCH_PREFIX, pr_num, target_ref.upper())
run_cmd("git fetch %s pull/%s/head:%s" % (PR_REMOTE_NAME, pr_num, pr_branch_name))
run_cmd("git fetch %s %s:%s" % (PUSH_REMOTE_NAME, target_ref, target_branch_name))
run_cmd("git checkout %s" % target_branch_name)
had_conflicts = False
try:
run_cmd(['git', 'merge', pr_branch_name, '--squash'])
except Exception as e:
msg = "Error merging: %s\nWould you like to manually fix-up this merge?" % e
continue_maybe(msg)
msg = "Okay, please fix any conflicts and 'git add' conflicting files... Finished?"
continue_maybe(msg)
had_conflicts = True
commit_authors = run_cmd(['git', 'log', 'HEAD..%s' % pr_branch_name,
'--pretty=format:%an <%ae>']).split("\n")
distinct_authors = sorted(set(commit_authors),
key=lambda x: commit_authors.count(x), reverse=True)
primary_author = raw_input(
"Enter primary author in the format of \"name <email>\" [%s]: " %
distinct_authors[0])
if primary_author == "":
primary_author = distinct_authors[0]
reviewers = raw_input(
"Enter reviewers in the format of \"name1 <email1>, name2 <email2>\": ").strip()
run_cmd(['git', 'log', 'HEAD..%s' % pr_branch_name, '--pretty=format:%h [%an] %s']).split("\n")
merge_message_flags = []
merge_message_flags += ["-m", title]
if body is not None:
# Remove "Committer Checklist" section
checklist_index = body.find("### Committer Checklist")
if checklist_index != -1:
body = body[:checklist_index].rstrip()
# Remove @ symbols from the body to avoid triggering e-mails to people every time someone creates a
# public fork of the project.
body = body.replace("@", "")
merge_message_flags += ["-m", body]
authors = "\n".join(["Author: %s" % a for a in distinct_authors])
merge_message_flags += ["-m", authors]
if reviewers != "":
merge_message_flags += ["-m", "Reviewers: %s" % reviewers]
if had_conflicts:
committer_name = run_cmd("git config --get user.name").strip()
committer_email = run_cmd("git config --get user.email").strip()
message = "This patch had conflicts when merged, resolved by\nCommitter: %s <%s>" % (
committer_name, committer_email)
merge_message_flags += ["-m", message]
# The string "Closes #%s" string is required for GitHub to correctly close the PR
close_line = "Closes #%s from %s" % (pr_num, pr_repo_desc)
merge_message_flags += ["-m", close_line]
run_cmd(['git', 'commit', '--author="%s"' % primary_author] + merge_message_flags)
continue_maybe("Merge complete (local ref %s). Push to %s?" % (
target_branch_name, PUSH_REMOTE_NAME))
try:
run_cmd('git push %s %s:%s' % (PUSH_REMOTE_NAME, target_branch_name, target_ref))
except Exception as e:
clean_up()
fail("Exception while pushing: %s" % e)
merge_hash = run_cmd("git rev-parse %s" % target_branch_name)[:8]
clean_up()
print("Pull request #%s merged!" % pr_num)
print("Merge hash: %s" % merge_hash)
return merge_hash
def cherry_pick(pr_num, merge_hash, default_branch):
pick_ref = raw_input("Enter a branch name [%s]: " % default_branch)
if pick_ref == "":
pick_ref = default_branch
pick_branch_name = "%s_PICK_PR_%s_%s" % (TEMP_BRANCH_PREFIX, pr_num, pick_ref.upper())
run_cmd("git fetch %s %s:%s" % (PUSH_REMOTE_NAME, pick_ref, pick_branch_name))
run_cmd("git checkout %s" % pick_branch_name)
try:
run_cmd("git cherry-pick -sx %s" % merge_hash)
except Exception as e:
msg = "Error cherry-picking: %s\nWould you like to manually fix-up this merge?" % e
continue_maybe(msg)
msg = "Okay, please fix any conflicts and finish the cherry-pick. Finished?"
continue_maybe(msg)
continue_maybe("Pick complete (local ref %s). Push to %s?" % (
pick_branch_name, PUSH_REMOTE_NAME))
try:
run_cmd('git push %s %s:%s' % (PUSH_REMOTE_NAME, pick_branch_name, pick_ref))
except Exception as e:
clean_up()
fail("Exception while pushing: %s" % e)
pick_hash = run_cmd("git rev-parse %s" % pick_branch_name)[:8]
clean_up()
print("Pull request #%s picked into %s!" % (pr_num, pick_ref))
print("Pick hash: %s" % pick_hash)
return pick_ref
def fix_version_from_branch(branch, versions):
# Note: Assumes this is a sorted (newest->oldest) list of un-released versions
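# For example (hypothetical values): branch "2.2" with unreleased versions
# ["2.3.0", "2.2.2", "2.2.1"] selects "2.2.1", the oldest matching version;
# the dev branch ("trunk") only ever selects DEFAULT_FIX_VERSION.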
if branch == DEV_BRANCH_NAME:
versions = filter(lambda x: x == DEFAULT_FIX_VERSION, versions)
if len(versions) > 0:
return versions[0]
else:
return None
else:
versions = filter(lambda x: x.startswith(branch), versions)
if len(versions) > 0:
return versions[-1]
else:
return None
def resolve_jira_issue(merge_branches, comment, default_jira_id=""):
asf_jira = jira.client.JIRA({'server': JIRA_API_BASE},
basic_auth=(JIRA_USERNAME, JIRA_PASSWORD))
jira_id = raw_input("Enter a JIRA id [%s]: " % default_jira_id)
if jira_id == "":
jira_id = default_jira_id
try:
issue = asf_jira.issue(jira_id)
except Exception as e:
fail("ASF JIRA could not find %s\n%s" % (jira_id, e))
cur_status = issue.fields.status.name
cur_summary = issue.fields.summary
cur_assignee = issue.fields.assignee
if cur_assignee is None:
cur_assignee = "NOT ASSIGNED!!!"
else:
cur_assignee = cur_assignee.displayName
if cur_status == "Resolved" or cur_status == "Closed":
fail("JIRA issue %s already has status '%s'" % (jira_id, cur_status))
print ("=== JIRA %s ===" % jira_id)
print ("summary\t\t%s\nassignee\t%s\nstatus\t\t%s\nurl\t\t%s/%s\n" % (
cur_summary, cur_assignee, cur_status, JIRA_BASE, jira_id))
versions = asf_jira.project_versions(CAPITALIZED_PROJECT_NAME)
versions = sorted(versions, key=lambda x: x.name, reverse=True)
versions = filter(lambda x: x.raw['released'] is False, versions)
version_names = map(lambda x: x.name, versions)
default_fix_versions = map(lambda x: fix_version_from_branch(x, version_names), merge_branches)
default_fix_versions = filter(lambda x: x != None, default_fix_versions)
default_fix_versions = ",".join(default_fix_versions)
fix_versions = raw_input("Enter comma-separated fix version(s) [%s]: " % default_fix_versions)
if fix_versions == "":
fix_versions = default_fix_versions
fix_versions = fix_versions.replace(" ", "").split(",")
def get_version_json(version_str):
return filter(lambda v: v.name == version_str, versions)[0].raw
jira_fix_versions = map(lambda v: get_version_json(v), fix_versions)
resolve = filter(lambda a: a['name'] == "Resolve Issue", asf_jira.transitions(jira_id))[0]
resolution = filter(lambda r: r.raw['name'] == "Fixed", asf_jira.resolutions())[0]
asf_jira.transition_issue(
jira_id, resolve["id"], fixVersions = jira_fix_versions,
comment = comment, resolution = {'id': resolution.raw['id']})
print "Successfully resolved %s with fixVersions=%s!" % (jira_id, fix_versions)
def resolve_jira_issues(title, merge_branches, comment):
jira_ids = re.findall("%s-[0-9]{4,5}" % CAPITALIZED_PROJECT_NAME, title)
if len(jira_ids) == 0:
resolve_jira_issue(merge_branches, comment)
for jira_id in jira_ids:
resolve_jira_issue(merge_branches, comment, jira_id)
def standardize_jira_ref(text):
"""
Standardize the jira reference commit message prefix to "PROJECT_NAME-XXX; Issue"
>>> standardize_jira_ref("%s-5954; Top by key" % CAPITALIZED_PROJECT_NAME)
'KAFKA-5954; Top by key'
>>> standardize_jira_ref("%s-5821; ParquetRelation2 CTAS should check if delete is successful" % PROJECT_NAME)
'KAFKA-5821; ParquetRelation2 CTAS should check if delete is successful'
>>> standardize_jira_ref("%s-4123 [WIP] Show new dependencies added in pull requests" % PROJECT_NAME)
'KAFKA-4123; [WIP] Show new dependencies added in pull requests'
>>> standardize_jira_ref("%s 5954: Top by key" % PROJECT_NAME)
'KAFKA-5954; Top by key'
>>> standardize_jira_ref("%s-979 a LRU scheduler for load balancing in TaskSchedulerImpl" % PROJECT_NAME)
'KAFKA-979; a LRU scheduler for load balancing in TaskSchedulerImpl'
>>> standardize_jira_ref("%s-1094 Support MiMa for reporting binary compatibility across versions." % CAPITALIZED_PROJECT_NAME)
'KAFKA-1094; Support MiMa for reporting binary compatibility across versions.'
>>> standardize_jira_ref("[WIP] %s-1146; Vagrant support" % CAPITALIZED_PROJECT_NAME)
'KAFKA-1146; [WIP] Vagrant support'
>>> standardize_jira_ref("%s-1032. If Yarn app fails before registering, app master stays aroun..." % PROJECT_NAME)
'KAFKA-1032; If Yarn app fails before registering, app master stays aroun...'
>>> standardize_jira_ref("%s-6250 %s-6146 %s-5911: Types are now reserved words in DDL parser." % (PROJECT_NAME, PROJECT_NAME, CAPITALIZED_PROJECT_NAME))
'KAFKA-6250 KAFKA-6146 KAFKA-5911; Types are now reserved words in DDL parser.'
>>> standardize_jira_ref("Additional information for users building from source code")
'Additional information for users building from source code'
"""
jira_refs = []
components = []
# Extract JIRA ref(s):
pattern = re.compile(r'(%s[-\s]*[0-9]{3,6})+' % CAPITALIZED_PROJECT_NAME, re.IGNORECASE)
for ref in pattern.findall(text):
# Add brackets, replace spaces with a dash, & convert to uppercase
jira_refs.append(re.sub(r'\s+', '-', ref.upper()))
text = text.replace(ref, '')
# Extract project name component(s):
# Look for alphanumeric chars, spaces, dashes, periods, and/or commas
pattern = re.compile(r'(\[[\w\s,-\.]+\])', re.IGNORECASE)
for component in pattern.findall(text):
components.append(component.upper())
text = text.replace(component, '')
# Cleanup any remaining symbols:
pattern = re.compile(r'^\W+(.*)', re.IGNORECASE)
if (pattern.search(text) is not None):
text = pattern.search(text).groups()[0]
# Assemble full text (JIRA ref(s), module(s), remaining text)
jira_prefix = ' '.join(jira_refs).strip()
if jira_prefix:
jira_prefix = jira_prefix + "; "
clean_text = jira_prefix + ' '.join(components).strip() + " " + text.strip()
# Replace multiple spaces with a single space, e.g. if no jira refs and/or components were included
clean_text = re.sub(r'\s+', ' ', clean_text.strip())
return clean_text
def main():
global original_head
original_head = get_current_branch()
branches = get_json("%s/branches" % GITHUB_API_BASE)
branch_names = filter(lambda x: x[0].isdigit(), [x['name'] for x in branches])
# Assumes branch names can be sorted lexicographically
latest_branch = sorted(branch_names, reverse=True)[0]
pr_num = raw_input("Which pull request would you like to merge? (e.g. 34): ")
pr = get_json("%s/pulls/%s" % (GITHUB_API_BASE, pr_num))
pr_events = get_json("%s/issues/%s/events" % (GITHUB_API_BASE, pr_num))
url = pr["url"]
pr_title = pr["title"]
commit_title = raw_input("Commit title [%s]: " % pr_title.encode("utf-8")).decode("utf-8")
if commit_title == "":
commit_title = pr_title
# Decide whether to use the modified title or not
modified_title = standardize_jira_ref(commit_title)
if modified_title != commit_title:
print "I've re-written the title as follows to match the standard format:"
print "Original: %s" % commit_title
print "Modified: %s" % modified_title
result = raw_input("Would you like to use the modified title? (y/n): ")
if result.lower() == "y":
commit_title = modified_title
print "Using modified title:"
else:
print "Using original title:"
print commit_title
body = pr["body"]
target_ref = pr["base"]["ref"]
user_login = pr["user"]["login"]
base_ref = pr["head"]["ref"]
pr_repo_desc = "%s/%s" % (user_login, base_ref)
# Merged pull requests don't appear as merged in the GitHub API;
# instead, they are closed by asfgit.
merge_commits = \
[e for e in pr_events if e["actor"]["login"] == "asfgit" and e["event"] == "closed"]
if merge_commits:
merge_hash = merge_commits[0]["commit_id"]
message = get_json("%s/commits/%s" % (GITHUB_API_BASE, merge_hash))["commit"]["message"]
print "Pull request %s has already been merged, assuming you want to backport" % pr_num
commit_is_downloaded = run_cmd(['git', 'rev-parse', '--quiet', '--verify',
"%s^{commit}" % merge_hash]).strip() != ""
if not commit_is_downloaded:
fail("Couldn't find any merge commit for #%s, you may need to update HEAD." % pr_num)
print "Found commit %s:\n%s" % (merge_hash, message)
cherry_pick(pr_num, merge_hash, latest_branch)
sys.exit(0)
if not bool(pr["mergeable"]):
msg = "Pull request %s is not mergeable in its current form.\n" % pr_num + \
"Continue? (experts only!)"
continue_maybe(msg)
print ("\n=== Pull Request #%s ===" % pr_num)
print ("PR title\t%s\nCommit title\t%s\nSource\t\t%s\nTarget\t\t%s\nURL\t\t%s" % (
pr_title, commit_title, pr_repo_desc, target_ref, url))
continue_maybe("Proceed with merging pull request #%s?" % pr_num)
merged_refs = [target_ref]
merge_hash = merge_pr(pr_num, target_ref, commit_title, body, pr_repo_desc)
pick_prompt = "Would you like to pick %s into another branch?" % merge_hash
while raw_input("\n%s (y/n): " % pick_prompt).lower() == "y":
merged_refs = merged_refs + [cherry_pick(pr_num, merge_hash, latest_branch)]
if JIRA_IMPORTED:
if JIRA_USERNAME and JIRA_PASSWORD:
continue_maybe("Would you like to update an associated JIRA?")
jira_comment = "Issue resolved by pull request %s\n[%s/%s]" % (pr_num, GITHUB_BASE, pr_num)
resolve_jira_issues(commit_title, merged_refs, jira_comment)
else:
print "JIRA_USERNAME and JIRA_PASSWORD not set"
print "Exiting without trying to close the associated JIRA."
else:
print "Could not find jira-python library. Run 'sudo pip install jira' to install."
print "Exiting without trying to close the associated JIRA."
if __name__ == "__main__":
import doctest
(failure_count, test_count) = doctest.testmod()
if (failure_count):
exit(-1)
main()
| gf53520/kafka | kafka-merge-pr.py | Python | apache-2.0 | 19,703 | 0.004213 |
# -*- coding:utf-8 -*-
from ...errors.httpforbiddenexception import HttpForbiddenException
import saklient
# module saklient.cloud.errors.dontcreateinsandboxexception
class DontCreateInSandboxException(HttpForbiddenException):
## The requested operation is not permitted. Some resources that span zones are billable; please review the pricing and create the resource in another zone.
## @param {int} status
# @param {str} code=None
# @param {str} message=""
def __init__(self, status, code=None, message=""):
super(DontCreateInSandboxException, self).__init__(status, code, "要求された操作は許可されていません。ゾーンをまたぐ一部のリソースは課金対象です。料金をご確認の上、他のゾーンで作成してください。" if message is None or message == "" else message)
| hnakamur/saklient.python | saklient/cloud/errors/dontcreateinsandboxexception.py | Python | mit | 899 | 0.009331 |
from __future__ import division
__author__ = 'Vladimir Iglovikov'
'''
Merges predictions for the https://www.kaggle.com/c/grupo-bimbo-inventory-demand competition:
Expm1(Mean([Log1p(x), Log1p(y)]))
'''
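# A worked example of the formula above, for two hypothetical predictions 3 and 7:
# log1p -> 1.386 and 2.079, mean -> 1.733, expm1 -> ~4.66 (a geometric-mean-like
# blend, slightly lower than the arithmetic mean of 5).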
import os
import numpy as np
import sys
import pandas as pd
import time
files = sys.argv[1:]
try:
files.remove('mean_log_merger_bimbo.py')
except ValueError:
pass
data = [pd.read_csv(fName).sort_values(by='id') for fName in files]
ids = data[0]['id']
result = pd.DataFrame()
submission = pd.DataFrame()
ind = 0
for df in data:
result[ind] = np.log1p(df['Demanda_uni_equil'])
ind += 1
submission['Demanda_uni_equil'] = np.expm1(result.mean(axis=1))
submission['id'] = ids
submission.to_csv('{timestamp}.csv'.format(timestamp=time.time()), index=False)
| ternaus/submission_merger | src/mean_log_merger_bimbo.py | Python | mit | 753 | 0.009296 |
from __future__ import absolute_import, print_function
__all__ = ['DummyProvider']
from django.http import HttpResponse
from sentry.identity.base import Provider
from sentry.pipeline import PipelineView
class AskEmail(PipelineView):
def dispatch(self, request, pipeline):
if 'email' in request.POST:
pipeline.bind_state('email', request.POST.get('email'))
return pipeline.next_step()
return HttpResponse(DummyProvider.TEMPLATE)
class DummyProvider(Provider):
name = 'Dummy'
key = 'dummy'
TEMPLATE = '<form method="POST"><input type="email" name="email" /></form>'
def get_pipeline_views(self):
return [AskEmail()]
def build_identity(self, state):
return {
'id': state['email'],
'email': state['email'],
'name': 'Dummy',
}
| looker/sentry | src/sentry/identity/providers/dummy.py | Python | bsd-3-clause | 857 | 0 |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates Dart source files from a mojom.Module."""
import mojom.generate.generator as generator
import mojom.generate.module as mojom
import mojom.generate.pack as pack
from mojom.generate.template_expander import UseJinja
_kind_to_dart_default_value = {
mojom.BOOL: "false",
mojom.INT8: "0",
mojom.UINT8: "0",
mojom.INT16: "0",
mojom.UINT16: "0",
mojom.INT32: "0",
mojom.UINT32: "0",
mojom.FLOAT: "0.0",
mojom.HANDLE: "null",
mojom.DCPIPE: "null",
mojom.DPPIPE: "null",
mojom.MSGPIPE: "null",
mojom.SHAREDBUFFER: "null",
mojom.NULLABLE_HANDLE: "null",
mojom.NULLABLE_DCPIPE: "null",
mojom.NULLABLE_DPPIPE: "null",
mojom.NULLABLE_MSGPIPE: "null",
mojom.NULLABLE_SHAREDBUFFER: "null",
mojom.INT64: "0",
mojom.UINT64: "0",
mojom.DOUBLE: "0.0",
mojom.STRING: "null",
mojom.NULLABLE_STRING: "null"
}
_kind_to_dart_decl_type = {
mojom.BOOL: "bool",
mojom.INT8: "int",
mojom.UINT8: "int",
mojom.INT16: "int",
mojom.UINT16: "int",
mojom.INT32: "int",
mojom.UINT32: "int",
mojom.FLOAT: "double",
mojom.HANDLE: "core.RawMojoHandle",
mojom.DCPIPE: "core.RawMojoHandle",
mojom.DPPIPE: "core.RawMojoHandle",
mojom.MSGPIPE: "core.RawMojoHandle",
mojom.SHAREDBUFFER: "core.RawMojoHandle",
mojom.NULLABLE_HANDLE: "core.RawMojoHandle",
mojom.NULLABLE_DCPIPE: "core.RawMojoHandle",
mojom.NULLABLE_DPPIPE: "core.RawMojoHandle",
mojom.NULLABLE_MSGPIPE: "core.RawMojoHandle",
mojom.NULLABLE_SHAREDBUFFER: "core.RawMojoHandle",
mojom.INT64: "int",
mojom.UINT64: "int",
mojom.DOUBLE: "double",
mojom.STRING: "String",
mojom.NULLABLE_STRING: "String"
}
def DartType(kind):
if kind.imported_from:
return kind.imported_from["unique_name"] + "." + kind.name
return kind.name
def DartDefaultValue(field):
if field.default:
if mojom.IsStructKind(field.kind):
assert field.default == "default"
return "new %s()" % DartType(field.kind)
return ExpressionToText(field.default)
if field.kind in mojom.PRIMITIVES:
return _kind_to_dart_default_value[field.kind]
if mojom.IsStructKind(field.kind):
return "null"
if mojom.IsArrayKind(field.kind):
return "null"
if mojom.IsMapKind(field.kind):
return "null"
if mojom.IsInterfaceKind(field.kind) or \
mojom.IsInterfaceRequestKind(field.kind):
return _kind_to_dart_default_value[mojom.MSGPIPE]
if mojom.IsEnumKind(field.kind):
return "0"
def DartDeclType(kind):
if kind in mojom.PRIMITIVES:
return _kind_to_dart_decl_type[kind]
if mojom.IsStructKind(kind):
return DartType(kind)
if mojom.IsArrayKind(kind):
array_type = DartDeclType(kind.kind)
return "List<" + array_type + ">"
if mojom.IsMapKind(kind):
key_type = DartDeclType(kind.key_kind)
value_type = DartDeclType(kind.value_kind)
return "Map<"+ key_type + ", " + value_type + ">"
if mojom.IsInterfaceKind(kind) or \
mojom.IsInterfaceRequestKind(kind):
return _kind_to_dart_decl_type[mojom.MSGPIPE]
if mojom.IsEnumKind(kind):
return "int"
def DartPayloadSize(packed):
packed_fields = packed.packed_fields
if not packed_fields:
return 0
last_field = packed_fields[-1]
offset = last_field.offset + last_field.size
pad = pack.GetPad(offset, 8)
return offset + pad
_kind_to_codec_type = {
mojom.BOOL: "bindings.Uint8",
mojom.INT8: "bindings.Int8",
mojom.UINT8: "bindings.Uint8",
mojom.INT16: "bindings.Int16",
mojom.UINT16: "bindings.Uint16",
mojom.INT32: "bindings.Int32",
mojom.UINT32: "bindings.Uint32",
mojom.FLOAT: "bindings.Float",
mojom.HANDLE: "bindings.Handle",
mojom.DCPIPE: "bindings.Handle",
mojom.DPPIPE: "bindings.Handle",
mojom.MSGPIPE: "bindings.Handle",
mojom.SHAREDBUFFER: "bindings.Handle",
mojom.NULLABLE_HANDLE: "bindings.NullableHandle",
mojom.NULLABLE_DCPIPE: "bindings.NullableHandle",
mojom.NULLABLE_DPPIPE: "bindings.NullableHandle",
mojom.NULLABLE_MSGPIPE: "bindings.NullableHandle",
mojom.NULLABLE_SHAREDBUFFER: "bindings.NullableHandle",
mojom.INT64: "bindings.Int64",
mojom.UINT64: "bindings.Uint64",
mojom.DOUBLE: "bindings.Double",
mojom.STRING: "bindings.MojoString",
mojom.NULLABLE_STRING: "bindings.NullableMojoString",
}
def CodecType(kind):
if kind in mojom.PRIMITIVES:
return _kind_to_codec_type[kind]
if mojom.IsStructKind(kind):
pointer_type = "NullablePointerTo" if mojom.IsNullableKind(kind) \
else "PointerTo"
return "new bindings.%s(%s)" % (pointer_type, DartType(kind))
if mojom.IsArrayKind(kind):
array_type = "NullableArrayOf" if mojom.IsNullableKind(kind) else "ArrayOf"
array_length = "" if kind.length is None else ", %d" % kind.length
element_type = ElementCodecType(kind.kind)
return "new bindings.%s(%s%s)" % (array_type, element_type, array_length)
if mojom.IsInterfaceKind(kind) or mojom.IsInterfaceRequestKind(kind):
return CodecType(mojom.MSGPIPE)
if mojom.IsEnumKind(kind):
return _kind_to_codec_type[mojom.INT32]
if mojom.IsMapKind(kind):
map_type = "NullableMapOf" if mojom.IsNullableKind(kind) else "MapOf"
key_type = ElementCodecType(kind.key_kind)
value_type = ElementCodecType(kind.value_kind)
return "new bindings.%s(%s, %s)" % (map_type, key_type, value_type)
return kind
def ElementCodecType(kind):
return "bindings.PackedBool" if mojom.IsBoolKind(kind) else CodecType(kind)
def DartDecodeSnippet(kind):
if kind in mojom.PRIMITIVES:
return "decodeStruct(%s)" % CodecType(kind)
if mojom.IsStructKind(kind):
return "decodeStructPointer(%s)" % DartType(kind)
if mojom.IsMapKind(kind):
return "decodeMapPointer(%s, %s)" % \
(ElementCodecType(kind.key_kind), ElementCodecType(kind.value_kind))
if mojom.IsArrayKind(kind) and mojom.IsBoolKind(kind.kind):
return "decodeArrayPointer(bindings.PackedBool)"
if mojom.IsArrayKind(kind):
return "decodeArrayPointer(%s)" % CodecType(kind.kind)
if mojom.IsInterfaceKind(kind) or mojom.IsInterfaceRequestKind(kind):
return DartDecodeSnippet(mojom.MSGPIPE)
if mojom.IsEnumKind(kind):
return DartDecodeSnippet(mojom.INT32)
def DartEncodeSnippet(kind):
if kind in mojom.PRIMITIVES:
return "encodeStruct(%s, " % CodecType(kind)
if mojom.IsStructKind(kind):
return "encodeStructPointer(%s, " % DartType(kind)
if mojom.IsMapKind(kind):
return "encodeMapPointer(%s, %s, " % \
(ElementCodecType(kind.key_kind), ElementCodecType(kind.value_kind))
if mojom.IsArrayKind(kind) and mojom.IsBoolKind(kind.kind):
return "encodeArrayPointer(bindings.PackedBool, ";
if mojom.IsArrayKind(kind):
return "encodeArrayPointer(%s, " % CodecType(kind.kind)
if mojom.IsInterfaceKind(kind) or mojom.IsInterfaceRequestKind(kind):
return DartEncodeSnippet(mojom.MSGPIPE)
if mojom.IsEnumKind(kind):
return DartEncodeSnippet(mojom.INT32)
def TranslateConstants(token):
if isinstance(token, (mojom.EnumValue, mojom.NamedValue)):
# Both variable and enum constants are constructed like:
# NamespaceUid.Struct.Enum_CONSTANT_NAME
name = ""
if token.imported_from:
name = token.imported_from["unique_name"] + "."
if token.parent_kind:
name = name + token.parent_kind.name + "."
if isinstance(token, mojom.EnumValue):
name = name + token.enum.name + "_"
return name + token.name
if isinstance(token, mojom.BuiltinValue):
if token.value == "double.INFINITY" or token.value == "float.INFINITY":
return "double.INFINITY";
if token.value == "double.NEGATIVE_INFINITY" or \
token.value == "float.NEGATIVE_INFINITY":
return "double.NEGATIVE_INFINITY";
if token.value == "double.NAN" or token.value == "float.NAN":
return "double.NAN";
# Strip leading '+'.
if token[0] == '+':
token = token[1:]
return token
def ExpressionToText(value):
return TranslateConstants(value)
class Generator(generator.Generator):
dart_filters = {
"default_value": DartDefaultValue,
"payload_size": DartPayloadSize,
"decode_snippet": DartDecodeSnippet,
"encode_snippet": DartEncodeSnippet,
"expression_to_text": ExpressionToText,
"dart_decl_type": DartDeclType,
"stylize_method": generator.StudlyCapsToCamel,
}
def GetParameters(self):
return {
"namespace": self.module.namespace,
"imports": self.GetImports(),
"kinds": self.module.kinds,
"enums": self.module.enums,
"module": self.module,
"structs": self.GetStructs() + self.GetStructsFromMethods(),
"interfaces": self.module.interfaces,
"imported_interfaces": self.GetImportedInterfaces(),
"imported_from": self.ImportedFrom(),
}
@UseJinja("dart_templates/module.lib.tmpl", filters=dart_filters)
def GenerateLibModule(self):
return self.GetParameters()
def GenerateFiles(self, args):
self.Write(self.GenerateLibModule(),
self.MatchMojomFilePath("%s.dart" % self.module.name))
def GetImports(self):
used_names = set()
for each_import in self.module.imports:
simple_name = each_import["module_name"].split(".")[0]
# Since each import is assigned its own name in the generated Dart code, the
# names need to be unique.
unique_name = simple_name
counter = 0
while unique_name in used_names:
counter += 1
unique_name = simple_name + str(counter)
used_names.add(unique_name)
each_import["unique_name"] = unique_name
counter += 1
return self.module.imports
def GetImportedInterfaces(self):
interface_to_import = {}
for each_import in self.module.imports:
for each_interface in each_import["module"].interfaces:
name = each_interface.name
interface_to_import[name] = each_import["unique_name"] + "." + name
return interface_to_import
def ImportedFrom(self):
interface_to_import = {}
for each_import in self.module.imports:
for each_interface in each_import["module"].interfaces:
name = each_interface.name
interface_to_import[name] = each_import["unique_name"] + "."
return interface_to_import
| mohamed--abdel-maksoud/chromium.src | mojo/public/tools/bindings/generators/mojom_dart_generator.py | Python | bsd-3-clause | 11,104 | 0.007745 |
"""NIAGADSOFINQUERY API application.
Usage:
simplePostwithPython.py get -n <titlename> -i <individualnum> -s <snpnum> -f <tfampath> -p <tpedpath> -a <apitoken>
simplePostwithPython.py (-h | --help)
simplePostwithPython.py (-v | --version)
Options:
-n --titlename <titlename> input title
-i --individualnum <individualnum> input individual num
-s --snpnum <snpnum> input snp num
-f --tfampath <tfampath> input tfam path
-p --tpedpath <tpedpath> input tped path
-a --apitoken <apitoken> input api token
-h --help show this screen
-v --version show version and exit
"""
import os
import re
import json
import sys
import getopt
import argparse
from docopt import docopt
from urllib2 import urlopen, Request
import urllib
import urllib2
import requests
arguments = docopt(__doc__, version='0.0.1')
url_phenotypes = 'http://localhost:9000/api/phenotypes'
url_genotypes = 'http://localhost:9000/api/genotypes'
token = 'Bearer ' + arguments['--apitoken']
headers = {'Authorization': '%s' % token}
request_phenotypes = Request(url_phenotypes, headers=headers)
request_genotypes = Request(url_genotypes, headers=headers)
response_phenotypes = urlopen(request_phenotypes)
response_genotypes = urlopen(request_genotypes)
data_phenotypes = json.loads(response_phenotypes.read())
data_genotypes = json.loads(response_genotypes.read())
def postPhenotypes(url_phenotypes, token, headers):
# Each .tfam line contains: familyID, individualID, paternalID, maternalID, sex (1 or 2), and a case/control phenotype (1 or 2).
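# A hypothetical example line, following the usual PLINK coding (1=control, 2=case):
#   "FAM001 IND001 0 0 1 2" -> family FAM001, individual IND001, unknown parents, male, case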
list = []
lines = [line.strip() for line in open(arguments['--tfampath'])]
for line in lines:
ids=line.split()
#print ids
print "{title:"+arguments['--titlename']+",family_id:"+ids[0]+",individual_id:"+ids[1]+",paternal_id:"+ids[2]+",maternal_id:"+ids[3]+",sex:"+ids[4]+",affection_status:"+ids[5]+"}"
values = {"title": arguments['--titlename'], "family_id": ids[0], "individual_id": ids[1], "paternal_id": ids[2], "maternal_id": ids[3], "sex": ids[4], "affection_status": ids[5]}
data = json.dumps(values)
print data
req = requests.post(url_phenotypes, data, headers=headers)
print req.status_code
def postGenotypes(url_genotypes, token, headers):
list = []
lines = [line.strip() for line in open(arguments['--tpedpath'])]
for line in lines:
ids=line.split()
indnum=int(arguments['--individualnum'])
snpnum=int(arguments['--snpnum'])
num = indnum*snpnum
#print ids
strina = ''.join(ids[4:num+4])
call = strina.strip(',')
print "{title:"+arguments['--titlename']+",chr:"+ids[0]+",variant_id:"+ids[1]+",location:"+ids[2]+",coordinate:"+ids[3]+",call:"+call+"}"
values = {"title": arguments['--titlename'], "chr": ids[0], "variant_id": ids[1], "location": ids[2], "coordinate": ids[3], "call": call}
data = json.dumps(values)
print data
req = requests.post(url_genotypes, data, headers=headers)
print req.status_code
postPhenotypes(url_phenotypes, token, headers)
postGenotypes(url_genotypes, token, headers)
| light940929/niagadsofinquery | testexample/simplePostwithPython.py | Python | mit | 3,247 | 0.00616 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional import fixtures as func_fixtures
from nova.tests.functional import integrated_helpers
from nova.tests.unit.image import fake as fake_image
from nova.tests.unit import policy_fixture
class HypervisorError(Exception):
"""This is just used to make sure the exception type is in the fault."""
pass
class ServerFaultTestCase(test.TestCase,
integrated_helpers.InstanceHelperMixin):
"""Tests for the server faults reporting from the API."""
def setUp(self):
super(ServerFaultTestCase, self).setUp()
# Setup the standard fixtures.
fake_image.stub_out_image_service(self)
self.addCleanup(fake_image.FakeImageService_reset)
self.useFixture(nova_fixtures.NeutronFixture(self))
self.useFixture(func_fixtures.PlacementFixture())
self.useFixture(policy_fixture.RealPolicyFixture())
# Start the compute services.
self.start_service('conductor')
self.start_service('scheduler')
self.compute = self.start_service('compute')
api_fixture = self.useFixture(nova_fixtures.OSAPIFixture(
api_version='v2.1'))
self.api = api_fixture.api
self.admin_api = api_fixture.admin_api
def test_server_fault_non_nova_exception(self):
"""Creates a server using the non-admin user, then reboots it which
will generate a non-NovaException fault and put the instance into
ERROR status. Then checks that fault details are only visible to the
admin user.
"""
# Create the server with the non-admin user.
server = self._build_server(
networks=[{'port': nova_fixtures.NeutronFixture.port_1['id']}])
server = self.api.post_server({'server': server})
server = self._wait_for_state_change(server, 'ACTIVE')
# Stop the server before rebooting it so that after the driver.reboot
# method raises an exception, the fake driver does not report the
# instance power state as running - that will make the compute manager
# set the instance vm_state to error.
self.api.post_server_action(server['id'], {'os-stop': None})
server = self._wait_for_state_change(server, 'SHUTOFF')
# Stub out the compute driver reboot method to raise a non-nova
# exception to simulate some error from the underlying hypervisor
# which in this case we are going to say has sensitive content.
error_msg = 'sensitive info'
with mock.patch.object(
self.compute.manager.driver, 'reboot',
side_effect=HypervisorError(error_msg)) as mock_reboot:
reboot_request = {'reboot': {'type': 'HARD'}}
self.api.post_server_action(server['id'], reboot_request)
# In this case we wait for the status to change to ERROR using
# the non-admin user so we can assert the fault details. We also
# wait for the task_state to be None since the wrap_instance_fault
# decorator runs before the reverts_task_state decorator so we will
# be sure the fault is set on the server.
server = self._wait_for_server_parameter(
server, {'status': 'ERROR', 'OS-EXT-STS:task_state': None},
api=self.api)
mock_reboot.assert_called_once()
# The server fault from the non-admin user API response should not
# have details in it.
self.assertIn('fault', server)
fault = server['fault']
self.assertNotIn('details', fault)
# And the sensitive details from the non-nova exception should not be
# in the message.
self.assertIn('message', fault)
self.assertNotIn(error_msg, fault['message'])
# The exception type class name should be in the message.
self.assertIn('HypervisorError', fault['message'])
# Get the server fault details for the admin user.
server = self.admin_api.get_server(server['id'])
fault = server['fault']
# The admin can see the fault details which includes the traceback.
self.assertIn('details', fault)
# The details also contain the exception message (which is not in the
# fault message).
self.assertIn(error_msg, fault['details'])
# Make sure the traceback is there by looking for part of it.
self.assertIn('in reboot_instance', fault['details'])
# The exception type class name should be in the message for the admin
# user as well since the fault handling code cannot distinguish who
# is going to see the message so it only sets class name.
self.assertIn('HypervisorError', fault['message'])
| rahulunair/nova | nova/tests/functional/test_server_faults.py | Python | apache-2.0 | 5,378 | 0 |
import os
import datetime
import time
IDLE_TIME = 2 * 60
STRESS_ITERATION_TIME = 10 * 60
STRESS_LEVELS = [2,10,25,50,75,100]
# ENDPOINTS = ["http://10.102.44.201/index.php/Special:Random", "http://10.102.44.202/index.php/Special:Random", "http://10.102.44.203/index.php/Special:Random"]
def do_stress():
print("{0}: Starting idle time for {1} seconds".format(datetime.datetime.now(), IDLE_TIME))
time.sleep(IDLE_TIME)
for stress_level in STRESS_LEVELS:
Timestamp = datetime.datetime.now()
print("{0}: Starting stress level {1} for {2} secs".format(
datetime.datetime.now(), stress_level, STRESS_ITERATION_TIME))
os.system("ab -c {0} -n 500000 -l -r http://10.102.44.202/index.php/Special:Random".format(
stress_level))
pass
print("{0}: Stress finished after {1} iterations".format(
datetime.datetime.now(), len(STRESS_LEVELS)))
return
if __name__ == '__main__':
    do_stress()
| skonefal/workloadsutils | ab_util.py | Python | apache-2.0 | 972 | 0.013374 |
# coding=utf-8
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test class for PXE driver."""
import os
import tempfile
import fixtures
import mock
from oslo_config import cfg
from oslo_serialization import jsonutils as json
from ironic.common import boot_devices
from ironic.common import dhcp_factory
from ironic.common import exception
from ironic.common.glance_service import base_image_service
from ironic.common import keystone
from ironic.common import pxe_utils
from ironic.common import states
from ironic.common import utils
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils
from ironic.drivers.modules import deploy_utils
from ironic.drivers.modules import iscsi_deploy
from ironic.drivers.modules import pxe
from ironic.openstack.common import fileutils
from ironic.tests.conductor import utils as mgr_utils
from ironic.tests.db import base as db_base
from ironic.tests.db import utils as db_utils
from ironic.tests.objects import utils as obj_utils
CONF = cfg.CONF
INST_INFO_DICT = db_utils.get_test_pxe_instance_info()
DRV_INFO_DICT = db_utils.get_test_pxe_driver_info()
class PXEValidateParametersTestCase(db_base.DbTestCase):
def test__parse_deploy_info(self):
# make sure we get back the expected things
node = obj_utils.create_test_node(self.context,
driver='fake_pxe',
instance_info=INST_INFO_DICT,
driver_info=DRV_INFO_DICT)
info = pxe._parse_deploy_info(node)
self.assertIsNotNone(info.get('deploy_ramdisk'))
self.assertIsNotNone(info.get('deploy_kernel'))
self.assertIsNotNone(info.get('image_source'))
self.assertIsNotNone(info.get('root_gb'))
self.assertEqual(0, info.get('ephemeral_gb'))
def test__parse_driver_info_missing_deploy_kernel(self):
# make sure error is raised when info is missing
info = dict(DRV_INFO_DICT)
del info['pxe_deploy_kernel']
node = obj_utils.create_test_node(self.context, driver_info=info)
self.assertRaises(exception.MissingParameterValue,
pxe._parse_driver_info,
node)
def test__parse_driver_info_missing_deploy_ramdisk(self):
# make sure error is raised when info is missing
info = dict(DRV_INFO_DICT)
del info['pxe_deploy_ramdisk']
node = obj_utils.create_test_node(self.context, driver_info=info)
self.assertRaises(exception.MissingParameterValue,
pxe._parse_driver_info,
node)
def test__parse_driver_info_good(self):
# make sure we get back the expected things
node = obj_utils.create_test_node(self.context,
driver='fake_pxe',
driver_info=DRV_INFO_DICT)
info = pxe._parse_driver_info(node)
self.assertIsNotNone(info.get('deploy_ramdisk'))
self.assertIsNotNone(info.get('deploy_kernel'))
class PXEPrivateMethodsTestCase(db_base.DbTestCase):
def setUp(self):
super(PXEPrivateMethodsTestCase, self).setUp()
n = {
'driver': 'fake_pxe',
'instance_info': INST_INFO_DICT,
'driver_info': DRV_INFO_DICT,
}
mgr_utils.mock_the_extension_manager(driver="fake_pxe")
self.node = obj_utils.create_test_node(self.context, **n)
@mock.patch.object(base_image_service.BaseImageService, '_show')
def test__get_image_info(self, show_mock):
properties = {'properties': {u'kernel_id': u'instance_kernel_uuid',
u'ramdisk_id': u'instance_ramdisk_uuid'}}
expected_info = {'ramdisk':
('instance_ramdisk_uuid',
os.path.join(CONF.pxe.tftp_root,
self.node.uuid,
'ramdisk')),
'kernel':
('instance_kernel_uuid',
os.path.join(CONF.pxe.tftp_root,
self.node.uuid,
'kernel')),
'deploy_ramdisk':
(DRV_INFO_DICT['pxe_deploy_ramdisk'],
os.path.join(CONF.pxe.tftp_root,
self.node.uuid,
'deploy_ramdisk')),
'deploy_kernel':
(DRV_INFO_DICT['pxe_deploy_kernel'],
os.path.join(CONF.pxe.tftp_root,
self.node.uuid,
'deploy_kernel'))}
show_mock.return_value = properties
image_info = pxe._get_image_info(self.node, self.context)
show_mock.assert_called_once_with('glance://image_uuid',
method='get')
self.assertEqual(expected_info, image_info)
# test with saved info
show_mock.reset_mock()
image_info = pxe._get_image_info(self.node, self.context)
self.assertEqual(expected_info, image_info)
self.assertFalse(show_mock.called)
self.assertEqual('instance_kernel_uuid',
self.node.instance_info.get('kernel'))
self.assertEqual('instance_ramdisk_uuid',
self.node.instance_info.get('ramdisk'))
@mock.patch.object(iscsi_deploy, 'build_deploy_ramdisk_options')
@mock.patch.object(pxe_utils, '_build_pxe_config')
def _test_build_pxe_config_options(self, build_pxe_mock, deploy_opts_mock,
ipxe_enabled=False):
self.config(pxe_append_params='test_param', group='pxe')
# NOTE: right '/' should be removed from url string
self.config(api_url='http://192.168.122.184:6385/', group='conductor')
self.config(disk_devices='sda', group='pxe')
fake_deploy_opts = {'iscsi_target_iqn': 'fake-iqn',
'deployment_id': 'fake-deploy-id',
'deployment_key': 'fake-deploy-key',
'disk': 'fake-disk',
'ironic_api_url': 'fake-api-url',
'boot_option': 'netboot'}
deploy_opts_mock.return_value = fake_deploy_opts
tftp_server = CONF.pxe.tftp_server
if ipxe_enabled:
http_url = 'http://192.1.2.3:1234'
self.config(ipxe_enabled=True, group='pxe')
self.config(http_url=http_url, group='pxe')
deploy_kernel = os.path.join(http_url, self.node.uuid,
'deploy_kernel')
deploy_ramdisk = os.path.join(http_url, self.node.uuid,
'deploy_ramdisk')
kernel = os.path.join(http_url, self.node.uuid, 'kernel')
ramdisk = os.path.join(http_url, self.node.uuid, 'ramdisk')
root_dir = CONF.pxe.http_root
else:
deploy_kernel = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
'deploy_kernel')
deploy_ramdisk = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
'deploy_ramdisk')
kernel = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
'kernel')
ramdisk = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
'ramdisk')
root_dir = CONF.pxe.tftp_root
expected_options = {
'ari_path': ramdisk,
'deployment_ari_path': deploy_ramdisk,
'pxe_append_params': 'test_param',
'aki_path': kernel,
'deployment_aki_path': deploy_kernel,
'tftp_server': tftp_server,
'boot_option': 'netboot'
}
expected_options.update(fake_deploy_opts)
image_info = {'deploy_kernel': ('deploy_kernel',
os.path.join(root_dir,
self.node.uuid,
'deploy_kernel')),
'deploy_ramdisk': ('deploy_ramdisk',
os.path.join(root_dir,
self.node.uuid,
'deploy_ramdisk')),
'kernel': ('kernel_id',
os.path.join(root_dir,
self.node.uuid,
'kernel')),
'ramdisk': ('ramdisk_id',
os.path.join(root_dir,
self.node.uuid,
'ramdisk'))
}
options = pxe._build_pxe_config_options(self.node,
image_info,
self.context)
self.assertEqual(expected_options, options)
def test__build_pxe_config_options(self):
self._test_build_pxe_config_options(ipxe_enabled=False)
def test__build_pxe_config_options_ipxe(self):
self._test_build_pxe_config_options(ipxe_enabled=True)
def test_get_token_file_path(self):
node_uuid = self.node.uuid
self.assertEqual('/tftpboot/token-' + node_uuid,
pxe._get_token_file_path(node_uuid))
@mock.patch.object(deploy_utils, 'fetch_images')
def test__cache_tftp_images_master_path(self, mock_fetch_image):
temp_dir = tempfile.mkdtemp()
self.config(tftp_root=temp_dir, group='pxe')
self.config(tftp_master_path=os.path.join(temp_dir,
'tftp_master_path'),
group='pxe')
image_path = os.path.join(temp_dir, self.node.uuid,
'deploy_kernel')
image_info = {'deploy_kernel': ('deploy_kernel', image_path)}
fileutils.ensure_tree(CONF.pxe.tftp_master_path)
pxe._cache_ramdisk_kernel(None, self.node, image_info)
mock_fetch_image.assert_called_once_with(None,
mock.ANY,
[('deploy_kernel',
image_path)],
True)
@mock.patch.object(pxe, 'TFTPImageCache', lambda: None)
@mock.patch.object(fileutils, 'ensure_tree')
@mock.patch.object(deploy_utils, 'fetch_images')
def test__cache_ramdisk_kernel(self, mock_fetch_image, mock_ensure_tree):
self.config(ipxe_enabled=False, group='pxe')
fake_pxe_info = {'foo': 'bar'}
expected_path = os.path.join(CONF.pxe.tftp_root, self.node.uuid)
pxe._cache_ramdisk_kernel(self.context, self.node, fake_pxe_info)
mock_ensure_tree.assert_called_with(expected_path)
mock_fetch_image.assert_called_once_with(self.context, mock.ANY,
fake_pxe_info.values(), True)
@mock.patch.object(pxe, 'TFTPImageCache', lambda: None)
@mock.patch.object(fileutils, 'ensure_tree')
@mock.patch.object(deploy_utils, 'fetch_images')
def test__cache_ramdisk_kernel_ipxe(self, mock_fetch_image,
mock_ensure_tree):
self.config(ipxe_enabled=True, group='pxe')
fake_pxe_info = {'foo': 'bar'}
expected_path = os.path.join(CONF.pxe.http_root, self.node.uuid)
pxe._cache_ramdisk_kernel(self.context, self.node, fake_pxe_info)
mock_ensure_tree.assert_called_with(expected_path)
mock_fetch_image.assert_called_once_with(self.context, mock.ANY,
fake_pxe_info.values(),
True)
class PXEDriverTestCase(db_base.DbTestCase):
def setUp(self):
super(PXEDriverTestCase, self).setUp()
self.context.auth_token = '4562138218392831'
self.temp_dir = tempfile.mkdtemp()
self.config(tftp_root=self.temp_dir, group='pxe')
self.temp_dir = tempfile.mkdtemp()
self.config(images_path=self.temp_dir, group='pxe')
mgr_utils.mock_the_extension_manager(driver="fake_pxe")
instance_info = INST_INFO_DICT
instance_info['deploy_key'] = 'fake-56789'
self.node = obj_utils.create_test_node(self.context,
driver='fake_pxe',
instance_info=instance_info,
driver_info=DRV_INFO_DICT)
self.port = obj_utils.create_test_port(self.context,
node_id=self.node.id)
self.config(group='conductor', api_url='http://127.0.0.1:1234/')
def _create_token_file(self):
token_path = pxe._get_token_file_path(self.node.uuid)
open(token_path, 'w').close()
return token_path
def test_get_properties(self):
expected = pxe.COMMON_PROPERTIES
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertEqual(expected, task.driver.get_properties())
@mock.patch.object(base_image_service.BaseImageService, '_show')
def test_validate_good(self, mock_glance):
mock_glance.return_value = {'properties': {'kernel_id': 'fake-kernel',
'ramdisk_id': 'fake-initr'}}
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.driver.deploy.validate(task)
def test_validate_fail(self):
info = dict(INST_INFO_DICT)
del info['image_source']
self.node.instance_info = json.dumps(info)
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.node['instance_info'] = json.dumps(info)
self.assertRaises(exception.MissingParameterValue,
task.driver.deploy.validate, task)
@mock.patch.object(base_image_service.BaseImageService, '_show')
def test_validate_fail_invalid_boot_mode(self, mock_glance):
properties = {'capabilities': 'boot_mode:foo,cap2:value2'}
mock_glance.return_value = {'properties': {'kernel_id': 'fake-kernel',
'ramdisk_id': 'fake-initr'}}
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.node.properties = properties
self.assertRaises(exception.InvalidParameterValue,
task.driver.deploy.validate, task)
@mock.patch.object(base_image_service.BaseImageService, '_show')
def test_validate_fail_invalid_config_uefi_ipxe(self, mock_glance):
properties = {'capabilities': 'boot_mode:uefi,cap2:value2'}
mock_glance.return_value = {'properties': {'kernel_id': 'fake-kernel',
'ramdisk_id': 'fake-initr'}}
self.config(ipxe_enabled=True, group='pxe')
self.config(http_url='dummy_url', group='pxe')
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.node.properties = properties
self.assertRaises(exception.InvalidParameterValue,
task.driver.deploy.validate, task)
@mock.patch.object(base_image_service.BaseImageService, '_show')
def test_validate_fail_invalid_boot_option(self, mock_glance):
properties = {'capabilities': 'boot_option:foo,dog:wuff'}
mock_glance.return_value = {'properties': {'kernel_id': 'fake-kernel',
'ramdisk_id': 'fake-initr'}}
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.node.properties = properties
self.assertRaises(exception.InvalidParameterValue,
task.driver.deploy.validate, task)
@mock.patch.object(base_image_service.BaseImageService, '_show')
def test_validate_fail_invalid_uefi_and_localboot(self, mock_glance):
properties = {'capabilities': 'boot_mode:uefi,boot_option:local'}
mock_glance.return_value = {'properties': {'kernel_id': 'fake-kernel',
'ramdisk_id': 'fake-initr'}}
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.node.properties = properties
self.assertRaises(exception.InvalidParameterValue,
task.driver.deploy.validate, task)
def test_validate_fail_no_port(self):
new_node = obj_utils.create_test_node(
self.context,
uuid='aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee',
driver='fake_pxe', instance_info=INST_INFO_DICT,
driver_info=DRV_INFO_DICT)
with task_manager.acquire(self.context, new_node.uuid,
shared=True) as task:
self.assertRaises(exception.MissingParameterValue,
task.driver.deploy.validate, task)
@mock.patch.object(base_image_service.BaseImageService, '_show')
@mock.patch.object(keystone, 'get_service_url')
def test_validate_good_api_url_from_config_file(self, mock_ks,
mock_glance):
mock_glance.return_value = {'properties': {'kernel_id': 'fake-kernel',
'ramdisk_id': 'fake-initr'}}
# not present in the keystone catalog
mock_ks.side_effect = exception.KeystoneFailure
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.driver.deploy.validate(task)
self.assertFalse(mock_ks.called)
@mock.patch.object(base_image_service.BaseImageService, '_show')
@mock.patch.object(keystone, 'get_service_url')
def test_validate_good_api_url_from_keystone(self, mock_ks, mock_glance):
mock_glance.return_value = {'properties': {'kernel_id': 'fake-kernel',
'ramdisk_id': 'fake-initr'}}
# present in the keystone catalog
mock_ks.return_value = 'http://127.0.0.1:1234'
# not present in the config file
self.config(group='conductor', api_url=None)
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.driver.deploy.validate(task)
mock_ks.assert_called_once_with()
@mock.patch.object(keystone, 'get_service_url')
def test_validate_fail_no_api_url(self, mock_ks):
# not present in the keystone catalog
mock_ks.side_effect = exception.KeystoneFailure
# not present in the config file
self.config(group='conductor', api_url=None)
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.InvalidParameterValue,
task.driver.deploy.validate, task)
mock_ks.assert_called_once_with()
@mock.patch.object(base_image_service.BaseImageService, '_show')
def test_validate_fail_no_image_kernel_ramdisk_props(self, mock_glance):
mock_glance.return_value = {'properties': {}}
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.MissingParameterValue,
task.driver.deploy.validate,
task)
@mock.patch.object(base_image_service.BaseImageService, '_show')
def test_validate_fail_glance_image_doesnt_exists(self, mock_glance):
mock_glance.side_effect = exception.ImageNotFound('not found')
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.InvalidParameterValue,
task.driver.deploy.validate, task)
@mock.patch.object(base_image_service.BaseImageService, '_show')
def test_validate_fail_glance_conn_problem(self, mock_glance):
exceptions = (exception.GlanceConnectionFailed('connection fail'),
exception.ImageNotAuthorized('not authorized'),
exception.Invalid('invalid'))
mock_glance.side_effect = exceptions
for exc in exceptions:
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.InvalidParameterValue,
task.driver.deploy.validate, task)
def test_vendor_passthru_validate_good(self):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.driver.vendor.validate(task, method='pass_deploy_info',
address='123456', iqn='aaa-bbb',
key='fake-56789')
def test_vendor_passthru_validate_fail(self):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.InvalidParameterValue,
task.driver.vendor.validate,
task, method='pass_deploy_info',
key='fake-56789')
def test_vendor_passthru_validate_key_notmatch(self):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.InvalidParameterValue,
task.driver.vendor.validate,
task, method='pass_deploy_info',
address='123456', iqn='aaa-bbb',
key='fake-12345')
@mock.patch.object(pxe, '_get_image_info')
@mock.patch.object(pxe, '_cache_ramdisk_kernel')
@mock.patch.object(pxe, '_build_pxe_config_options')
@mock.patch.object(pxe_utils, 'create_pxe_config')
def test_prepare(self, mock_pxe_config,
mock_build_pxe, mock_cache_r_k,
mock_img_info):
mock_build_pxe.return_value = None
mock_img_info.return_value = None
mock_pxe_config.return_value = None
mock_cache_r_k.return_value = None
with task_manager.acquire(self.context, self.node.uuid) as task:
task.driver.deploy.prepare(task)
mock_img_info.assert_called_once_with(task.node,
self.context)
mock_pxe_config.assert_called_once_with(
task, None, CONF.pxe.pxe_config_template)
mock_cache_r_k.assert_called_once_with(self.context,
task.node, None)
@mock.patch.object(keystone, 'token_expires_soon')
@mock.patch.object(deploy_utils, 'get_image_mb')
@mock.patch.object(iscsi_deploy, '_get_image_file_path')
@mock.patch.object(iscsi_deploy, 'cache_instance_image')
@mock.patch.object(dhcp_factory.DHCPFactory, 'update_dhcp')
@mock.patch.object(manager_utils, 'node_power_action')
@mock.patch.object(manager_utils, 'node_set_boot_device')
def test_deploy(self, mock_node_set_boot, mock_node_power_action,
mock_update_dhcp, mock_cache_instance_image,
mock_get_image_file_path, mock_get_image_mb, mock_expire):
fake_img_path = '/test/path/test.img'
mock_get_image_file_path.return_value = fake_img_path
mock_get_image_mb.return_value = 1
mock_expire.return_value = False
self.config(deploy_callback_timeout=600, group='conductor')
with task_manager.acquire(self.context,
self.node.uuid, shared=False) as task:
dhcp_opts = pxe_utils.dhcp_options_for_instance(task)
state = task.driver.deploy.deploy(task)
self.assertEqual(state, states.DEPLOYWAIT)
mock_cache_instance_image.assert_called_once_with(
self.context, task.node)
mock_get_image_file_path.assert_called_once_with(task.node.uuid)
mock_get_image_mb.assert_called_once_with(fake_img_path)
mock_update_dhcp.assert_called_once_with(task, dhcp_opts)
mock_expire.assert_called_once_with(self.context.auth_token, 600)
mock_node_set_boot.assert_called_once_with(task, 'pxe',
persistent=True)
mock_node_power_action.assert_called_once_with(task, states.REBOOT)
# ensure token file created
t_path = pxe._get_token_file_path(self.node.uuid)
token = open(t_path, 'r').read()
self.assertEqual(self.context.auth_token, token)
@mock.patch.object(keystone, 'get_admin_auth_token')
@mock.patch.object(keystone, 'token_expires_soon')
@mock.patch.object(deploy_utils, 'get_image_mb')
@mock.patch.object(iscsi_deploy, '_get_image_file_path')
@mock.patch.object(iscsi_deploy, 'cache_instance_image')
@mock.patch.object(dhcp_factory.DHCPFactory, 'update_dhcp')
@mock.patch.object(manager_utils, 'node_power_action')
@mock.patch.object(manager_utils, 'node_set_boot_device')
def test_deploy_token_near_expiration(self, mock_node_set_boot,
mock_node_power_action, mock_update_dhcp,
mock_cache_instance_image, mock_get_image_file_path,
mock_get_image_mb, mock_expire, mock_admin_token):
mock_get_image_mb.return_value = 1
mock_expire.return_value = True
new_token = 'new_admin_token'
mock_admin_token.return_value = new_token
self.config(deploy_callback_timeout=600, group='conductor')
with task_manager.acquire(self.context,
self.node.uuid, shared=False) as task:
task.driver.deploy.deploy(task)
mock_expire.assert_called_once_with(self.context.auth_token, 600)
mock_admin_token.assert_called_once_with()
# ensure token file created with new token
t_path = pxe._get_token_file_path(self.node.uuid)
token = open(t_path, 'r').read()
self.assertEqual(new_token, token)
@mock.patch.object(deploy_utils, 'get_image_mb')
@mock.patch.object(iscsi_deploy, '_get_image_file_path')
@mock.patch.object(iscsi_deploy, 'cache_instance_image')
def test_deploy_image_too_large(self, mock_cache_instance_image,
mock_get_image_file_path,
mock_get_image_mb):
fake_img_path = '/test/path/test.img'
mock_get_image_file_path.return_value = fake_img_path
mock_get_image_mb.return_value = 999999
with task_manager.acquire(self.context,
self.node.uuid, shared=False) as task:
self.assertRaises(exception.InstanceDeployFailure,
task.driver.deploy.deploy, task)
mock_cache_instance_image.assert_called_once_with(
self.context, task.node)
mock_get_image_file_path.assert_called_once_with(task.node.uuid)
mock_get_image_mb.assert_called_once_with(fake_img_path)
@mock.patch.object(manager_utils, 'node_power_action')
def test_tear_down(self, node_power_mock):
with task_manager.acquire(self.context,
self.node.uuid) as task:
state = task.driver.deploy.tear_down(task)
self.assertEqual(states.DELETED, state)
node_power_mock.assert_called_once_with(task, states.POWER_OFF)
@mock.patch.object(dhcp_factory.DHCPFactory, 'update_dhcp')
def test_take_over(self, update_dhcp_mock):
with task_manager.acquire(
self.context, self.node.uuid, shared=True) as task:
dhcp_opts = pxe_utils.dhcp_options_for_instance(task)
task.driver.deploy.take_over(task)
update_dhcp_mock.assert_called_once_with(
task, dhcp_opts)
@mock.patch.object(pxe_utils, 'clean_up_pxe_config')
@mock.patch.object(dhcp_factory.DHCPFactory, 'update_dhcp')
def test_take_over_localboot(self, update_dhcp_mock, clean_pxe_mock):
with task_manager.acquire(
self.context, self.node.uuid, shared=True) as task:
task.node.instance_info['capabilities'] = {"boot_option": "local"}
dhcp_opts = pxe_utils.dhcp_options_for_instance(task)
task.driver.deploy.take_over(task)
update_dhcp_mock.assert_called_once_with(
task, dhcp_opts)
clean_pxe_mock.assert_called_once_with(task)
@mock.patch.object(pxe_utils, 'clean_up_pxe_config')
@mock.patch.object(manager_utils, 'node_set_boot_device')
@mock.patch.object(deploy_utils, 'notify_deploy_complete')
@mock.patch.object(deploy_utils, 'switch_pxe_config')
@mock.patch.object(iscsi_deploy, 'InstanceImageCache')
def _test_continue_deploy(self, is_localboot, mock_image_cache,
mock_switch_config, notify_mock,
mock_node_boot_dev, mock_clean_pxe):
token_path = self._create_token_file()
# set local boot
if is_localboot:
i_info = self.node.instance_info
i_info['capabilities'] = '{"boot_option": "local"}'
self.node.instance_info = i_info
self.node.power_state = states.POWER_ON
self.node.provision_state = states.DEPLOYWAIT
self.node.target_provision_state = states.ACTIVE
self.node.save()
root_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
boot_mode = None
def fake_deploy(**kwargs):
return root_uuid
self.useFixture(fixtures.MonkeyPatch(
'ironic.drivers.modules.deploy_utils.deploy',
fake_deploy))
with task_manager.acquire(self.context, self.node.uuid) as task:
task.driver.vendor._continue_deploy(
task, address='123456', iqn='aaa-bbb', key='fake-56789')
self.node.refresh()
self.assertEqual(states.ACTIVE, self.node.provision_state)
self.assertEqual(states.NOSTATE, self.node.target_provision_state)
self.assertEqual(states.POWER_ON, self.node.power_state)
self.assertIsNone(self.node.last_error)
self.assertFalse(os.path.exists(token_path))
mock_image_cache.assert_called_once_with()
mock_image_cache.return_value.clean_up.assert_called_once_with()
pxe_config_path = pxe_utils.get_pxe_config_file_path(self.node.uuid)
notify_mock.assert_called_once_with('123456')
if is_localboot:
mock_node_boot_dev.assert_called_once_with(
mock.ANY, boot_devices.DISK, persistent=True)
mock_clean_pxe.assert_called_once_with(mock.ANY)
self.assertFalse(mock_switch_config.called)
else:
mock_switch_config.assert_called_once_with(
pxe_config_path, root_uuid, boot_mode)
self.assertFalse(mock_node_boot_dev.called)
self.assertFalse(mock_clean_pxe.called)
def test_continue_deploy(self):
self._test_continue_deploy(False)
def test_continue_deploy_localboot(self):
self._test_continue_deploy(True)
@mock.patch.object(iscsi_deploy, 'InstanceImageCache')
def test_continue_deploy_fail(self, mock_image_cache):
token_path = self._create_token_file()
self.node.power_state = states.POWER_ON
self.node.provision_state = states.DEPLOYWAIT
self.node.target_provision_state = states.ACTIVE
self.node.save()
def fake_deploy(**kwargs):
raise exception.InstanceDeployFailure("test deploy error")
self.useFixture(fixtures.MonkeyPatch(
'ironic.drivers.modules.deploy_utils.deploy',
fake_deploy))
with task_manager.acquire(self.context, self.node.uuid) as task:
task.driver.vendor._continue_deploy(
task, address='123456', iqn='aaa-bbb', key='fake-56789')
self.node.refresh()
self.assertEqual(states.DEPLOYFAIL, self.node.provision_state)
self.assertEqual(states.ACTIVE, self.node.target_provision_state)
self.assertEqual(states.POWER_OFF, self.node.power_state)
self.assertIsNotNone(self.node.last_error)
self.assertFalse(os.path.exists(token_path))
mock_image_cache.assert_called_once_with()
mock_image_cache.return_value.clean_up.assert_called_once_with()
@mock.patch.object(iscsi_deploy, 'InstanceImageCache')
def test_continue_deploy_ramdisk_fails(self, mock_image_cache):
token_path = self._create_token_file()
self.node.power_state = states.POWER_ON
self.node.provision_state = states.DEPLOYWAIT
self.node.target_provision_state = states.ACTIVE
self.node.save()
def fake_deploy(**kwargs):
pass
self.useFixture(fixtures.MonkeyPatch(
'ironic.drivers.modules.deploy_utils.deploy',
fake_deploy))
with task_manager.acquire(self.context, self.node.uuid) as task:
task.driver.vendor._continue_deploy(
task, address='123456', iqn='aaa-bbb',
key='fake-56789', error='test ramdisk error')
self.node.refresh()
self.assertEqual(states.DEPLOYFAIL, self.node.provision_state)
self.assertEqual(states.ACTIVE, self.node.target_provision_state)
self.assertEqual(states.POWER_OFF, self.node.power_state)
self.assertIsNotNone(self.node.last_error)
self.assertFalse(os.path.exists(token_path))
mock_image_cache.assert_called_once_with()
mock_image_cache.return_value.clean_up.assert_called_once_with()
def test_continue_deploy_invalid(self):
self.node.power_state = states.POWER_ON
self.node.provision_state = states.AVAILABLE
self.node.target_provision_state = states.NOSTATE
self.node.save()
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaises(exception.InvalidState,
task.driver.vendor._continue_deploy,
task, address='123456', iqn='aaa-bbb',
key='fake-56789', error='test ramdisk error')
self.node.refresh()
self.assertEqual(states.AVAILABLE, self.node.provision_state)
self.assertEqual(states.NOSTATE, self.node.target_provision_state)
self.assertEqual(states.POWER_ON, self.node.power_state)
def test_lock_elevated(self):
with task_manager.acquire(self.context, self.node.uuid) as task:
with mock.patch.object(task.driver.vendor,
'_continue_deploy') as _cont_deploy_mock:
task.driver.vendor._continue_deploy(
task, address='123456', iqn='aaa-bbb', key='fake-56789')
# lock elevated w/o exception
self.assertEqual(1, _cont_deploy_mock.call_count,
"_continue_deploy was not called once.")
def test_vendor_routes(self):
expected = ['pass_deploy_info']
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
vendor_routes = task.driver.vendor.vendor_routes
self.assertIsInstance(vendor_routes, dict)
self.assertEqual(expected, list(vendor_routes))
def test_driver_routes(self):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
driver_routes = task.driver.vendor.driver_routes
self.assertIsInstance(driver_routes, dict)
self.assertEqual({}, driver_routes)
@mock.patch.object(utils, 'unlink_without_raise')
@mock.patch.object(iscsi_deploy, 'destroy_images')
@mock.patch.object(pxe_utils, 'clean_up_pxe_config')
@mock.patch.object(pxe, 'TFTPImageCache')
@mock.patch.object(pxe, '_get_image_info')
class CleanUpTestCase(db_base.DbTestCase):
def setUp(self):
super(CleanUpTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver="fake_pxe")
instance_info = INST_INFO_DICT
instance_info['deploy_key'] = 'fake-56789'
self.node = obj_utils.create_test_node(self.context,
driver='fake_pxe',
instance_info=instance_info,
driver_info=DRV_INFO_DICT)
def test_clean_up(self, mock_image_info, mock_cache, mock_pxe_clean,
mock_iscsi_clean, mock_unlink):
mock_image_info.return_value = {'label': ['', 'deploy_kernel']}
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.driver.deploy.clean_up(task)
mock_image_info.assert_called_once_with(task.node,
task.context)
mock_pxe_clean.assert_called_once_with(task)
mock_unlink.assert_any_call('deploy_kernel')
mock_unlink.assert_any_call(pxe._get_token_file_path(
task.node.uuid))
mock_iscsi_clean.assert_called_once_with(task.node.uuid)
mock_cache.return_value.clean_up.assert_called_once_with()
def test_clean_up_fail_get_image_info(self, mock_image_info, mock_cache,
mock_pxe_clean, mock_iscsi_clean,
mock_unlink):
mock_image_info.side_effect = exception.MissingParameterValue('foo')
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.driver.deploy.clean_up(task)
mock_image_info.assert_called_once_with(task.node,
task.context)
mock_pxe_clean.assert_called_once_with(task)
mock_unlink.assert_called_once_with(pxe._get_token_file_path(
task.node.uuid))
mock_iscsi_clean.assert_called_once_with(task.node.uuid)
mock_cache.return_value.clean_up.assert_called_once_with()
class CleanUpFullFlowTestCase(db_base.DbTestCase):
def setUp(self):
super(CleanUpFullFlowTestCase, self).setUp()
self.config(image_cache_size=0, group='pxe')
# Configure node
mgr_utils.mock_the_extension_manager(driver="fake_pxe")
instance_info = INST_INFO_DICT
instance_info['deploy_key'] = 'fake-56789'
self.node = obj_utils.create_test_node(self.context,
driver='fake_pxe',
instance_info=instance_info,
driver_info=DRV_INFO_DICT)
self.port = obj_utils.create_test_port(self.context,
node_id=self.node.id)
# Configure temporary directories
pxe_temp_dir = tempfile.mkdtemp()
self.config(tftp_root=pxe_temp_dir, group='pxe')
tftp_master_dir = os.path.join(CONF.pxe.tftp_root,
'tftp_master')
self.config(tftp_master_path=tftp_master_dir, group='pxe')
os.makedirs(tftp_master_dir)
instance_temp_dir = tempfile.mkdtemp()
self.config(images_path=instance_temp_dir,
group='pxe')
instance_master_dir = os.path.join(CONF.pxe.images_path,
'instance_master')
self.config(instance_master_path=instance_master_dir,
group='pxe')
os.makedirs(instance_master_dir)
self.pxe_config_dir = os.path.join(CONF.pxe.tftp_root, 'pxelinux.cfg')
os.makedirs(self.pxe_config_dir)
# Populate some file names
self.master_kernel_path = os.path.join(CONF.pxe.tftp_master_path,
'kernel')
self.master_instance_path = os.path.join(CONF.pxe.instance_master_path,
'image_uuid')
self.node_tftp_dir = os.path.join(CONF.pxe.tftp_root,
self.node.uuid)
os.makedirs(self.node_tftp_dir)
self.kernel_path = os.path.join(self.node_tftp_dir,
'kernel')
self.node_image_dir = iscsi_deploy._get_image_dir_path(self.node.uuid)
os.makedirs(self.node_image_dir)
self.image_path = iscsi_deploy._get_image_file_path(self.node.uuid)
self.config_path = pxe_utils.get_pxe_config_file_path(self.node.uuid)
self.mac_path = pxe_utils._get_pxe_mac_path(self.port.address)
self.token_path = pxe._get_token_file_path(self.node.uuid)
# Create files
self.files = [self.config_path, self.master_kernel_path,
self.master_instance_path, self.token_path]
for fname in self.files:
# NOTE(dtantsur): files with 0 size won't be cleaned up
with open(fname, 'w') as fp:
fp.write('test')
os.link(self.config_path, self.mac_path)
os.link(self.master_kernel_path, self.kernel_path)
os.link(self.master_instance_path, self.image_path)
@mock.patch.object(pxe, '_get_image_info')
def test_clean_up_with_master(self, mock_get_image_info):
image_info = {'kernel': ('kernel_uuid',
self.kernel_path)}
mock_get_image_info.return_value = image_info
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.driver.deploy.clean_up(task)
mock_get_image_info.assert_called_once_with(task.node,
task.context)
for path in ([self.kernel_path, self.image_path, self.config_path]
+ self.files):
self.assertFalse(os.path.exists(path),
'%s is not expected to exist' % path)
| ramineni/myironic | ironic/tests/drivers/test_pxe.py | Python | apache-2.0 | 44,025 | 0.000409 |
import random
import sys
class MarkovChain(object):
    def __init__(self, separator=None, corpus=None):
self.separator = separator
self.corpus = corpus
self.chain = self.setChain()
    def setChain(self):
        # Build a mapping from each token to the list of tokens that follow it.
        chain = {}
        if self.separator is None:
            allItems = self.corpus.split()
        else:
            allItems = self.corpus.split(self.separator)
        lastItem = allItems[-1]
        for i, x in enumerate(allItems):
            if i < len(allItems) - 1:
                chain.setdefault(x, []).append(allItems[i + 1])
        # Map the final token to a newline so generated text can terminate.
        chain.setdefault(lastItem, []).append("\n")
        return chain
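    # Illustrative example (not part of the original script): for a corpus of
    # "a b a c" split on " ", setChain() returns
    #     {'a': ['b', 'c'], 'b': ['a'], 'c': ['\n']}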
    def printSth(self, maxItems=20):
        # Random-walk the chain for up to maxItems tokens and return the text.
        res = ""
        t = random.choice(list(self.chain.keys()))
        for i in range(0, maxItems):
            try:
                print(self.chain[t])  # debug: show the candidate successors
                tmp = random.choice(self.chain[t])
                res += " " + tmp
                t = tmp
            except KeyError:
                return res
        return res
def main():
    with open(sys.argv[1], 'r') as corpus_file:
        corpus = corpus_file.read()
    mc = MarkovChain(corpus=corpus, separator=" ")
    #print(mc.chain)
    print(mc.printSth(int(sys.argv[2])))
if __name__ == "__main__":
main()
| gglyptodon/marcook | markovchain.py | Python | gpl-3.0 | 1,417 | 0.015526 |
# -*- coding: utf-8 -*-
"""
module that specifies how we choose parameters based on current search database
properties
"""
from __future__ import absolute_import, division, print_function
#import six
import utool as ut
#import numpy as np
#import vtool as vt
#from ibeis.algo.hots import hstypes
#from ibeis.algo.hots import match_chips4 as mc4
#from ibeis.algo.hots import distinctiveness_normalizer
#from six.moves import filter
print, print_, printDBG, rrr, profile = ut.inject(__name__, '[autoparams]')
@profile
def choose_vsmany_K(num_names, qaids, daids):
"""
TODO: Should also scale up the number of checks as well
method for choosing K in the initial vsmany queries
Ignore:
>>> # DISABLE_DOCTEST
>>> # Shows plot for K vs number of names
>>> from ibeis.algo.hots.automated_params import * # NOQA
>>> import ibeis
>>> from ibeis import constants as const
>>> ibs = ibeis.opendb(defaultdb='testdb1')
>>> valid_aids = ibs.get_valid_aids(species=const.TEST_SPECIES.ZEB_PLAIN)
>>> num_names = np.arange(0, 1000)
>>> num_names_slope = .1
>>> K_max = 10
>>> K_min = 1
>>> K_list = np.floor(num_names_slope * num_names)
>>> K_list[K_list > K_max] = K_max
>>> K_list[K_list < K_min] = K_min
>>> clip_index_list = np.where(K_list >= K_max)[0]
>>> clip_index = clip_index_list[min(len(clip_index_list) - 1, 10)]
>>> K_list = K_list[0:clip_index]
>>> num_names = num_names[0:clip_index]
>>> pt.plot2(num_names, K_list, x_label='num_names', y_label='K',
... equal_aspect=False, marker='g-', pad=1, dark=True)
>>> pt.update()
"""
#K = ibs.cfg.query_cfg.nn_cfg.K
# TODO: paramaterize in config
    num_names_slope = .1  # increase K by one for every ten names
K_max = 10
K_min = 1
num_names_lower = K_min / num_names_slope
num_names_upper = K_max / num_names_slope
if num_names < num_names_lower:
K = K_min
elif num_names < num_names_upper:
K = num_names_slope * num_names
else:
K = K_max
with ut.embed_on_exception_context:
if len(ut.intersect_ordered(qaids, daids)) > 0:
# if self is in query bump k
K += 1
return K
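# Illustrative values for the scaling above (slope .1, K clipped to [1, 10]);
# these worked examples are not from the original module:
#   num_names = 5   -> K = 1
#   num_names = 50  -> K = 5.0
#   num_names = 500 -> K = 10
# K is then bumped by 1 whenever qaids and daids overlap.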
if __name__ == '__main__':
"""
CommandLine:
python -m ibeis.algo.hots.automated_params
python -m ibeis.algo.hots.automated_params --allexamples
python -m ibeis.algo.hots.automated_params --allexamples --noface --nosrc
"""
import multiprocessing
multiprocessing.freeze_support() # for win32
import utool as ut # NOQA
ut.doctest_funcs()
| SU-ECE-17-7/ibeis | _broken/_old_qt_hs_matcher/automated_params.py | Python | apache-2.0 | 2,697 | 0.003708 |
#!/usr/bin/env python
import freenect
import signal
import matplotlib.pyplot as mp
from misc.demo import frame_convert
mp.ion()
image_rgb = None
image_depth = None
keep_running = True
def display_depth(dev, data, timestamp):
global image_depth
data = frame_convert.pretty_depth(data)
mp.gray()
mp.figure(1)
if image_depth:
image_depth.set_data(data)
else:
image_depth = mp.imshow(data, interpolation='nearest', animated=True)
mp.draw()
def display_rgb(dev, data, timestamp):
global image_rgb
mp.figure(2)
if image_rgb:
image_rgb.set_data(data)
else:
image_rgb = mp.imshow(data, interpolation='nearest', animated=True)
mp.draw()
def body(*args):
if not keep_running:
raise freenect.Kill
def handler(signum, frame):
global keep_running
keep_running = False
print('Press Ctrl-C in terminal to stop')
signal.signal(signal.SIGINT, handler)
freenect.runloop(depth=display_depth,
video=display_rgb,
body=body)
| Dining-Engineers/left-luggage-detection | misc/demo/demo_mp_async.py | Python | gpl-2.0 | 1,051 | 0 |
""" Notices indicate how a regulation has changed since the last version. This
module contains code to compile a regulation from a notice's changes. """
from bisect import bisect
from collections import defaultdict
import copy
import itertools
import logging
from regparser.grammar.tokens import Verb
from regparser.tree.struct import Node, find
from regparser.tree.xml_parser import interpretations
from regparser.tree.xml_parser import tree_utils
from regparser.utils import roman_nums
def get_parent_label(node):
    """ Given a node, get the label of its parent. """
if node.node_type == Node.SUBPART:
return node.label[0]
elif node.node_type == Node.INTERP:
marker_position = node.label.index(Node.INTERP_MARK)
interpreting = node.label[:marker_position]
comment_pars = node.label[marker_position + 1:]
if comment_pars: # 111-3-a-Interp-4-i
return '-'.join(node.label[:-1])
elif len(interpreting) > 1: # 111-3-a-Interp
return '-'.join(interpreting[:-1] + [Node.INTERP_MARK])
else: # 111-Interp
return node.label[0]
else:
parent_label = node.label[:-1]
return '-'.join(parent_label)
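# A few worked examples of the rules above (illustrative, not from the source);
# shown as node label -> parent label:
#   REGTEXT node ['111', '3', 'a']                     -> '111-3'
#   INTERP node  ['111', '3', 'a', 'Interp', '4', 'i'] -> '111-3-a-Interp-4'
#   INTERP node  ['111', '3', 'a', 'Interp']           -> '111-3-Interp'
#   INTERP node  ['111', 'Interp']                     -> '111'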
def make_label_sortable(label, roman=False):
    """ Make labels sortable by converting them as appropriate.
Also, appendices have labels that look like 30(a), we make those
appropriately sortable. """
if label.isdigit():
return (int(label),)
if roman:
romans = list(itertools.islice(roman_nums(), 0, 50))
return (1 + romans.index(label),)
# segment the label piece into component parts
# e.g. 45Ai33b becomes (45, 'A', 'i', 33, 'b')
INT, UPPER, LOWER = 1, 2, 3
segments, segment, seg_type = [], "", None
for ch in label:
if ch.isdigit():
ch_type = INT
elif ch.isalpha() and ch == ch.upper():
ch_type = UPPER
elif ch.isalpha() and ch == ch.lower():
ch_type = LOWER
else:
# other character, e.g. parens, guarantee segmentation
ch_type = None
if ch_type != seg_type and segment: # new type of character
segments.append(segment)
segment = ""
seg_type = ch_type
if ch_type:
segment += ch
if segment: # ended with something other than a paren
segments.append(segment)
segments = [int(seg) if seg.isdigit() else seg for seg in segments]
return tuple(segments)
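# Illustrative outputs of make_label_sortable (assuming roman_nums() yields
# 'i', 'ii', 'iii', 'iv', ...); these examples are not part of the original:
#   make_label_sortable('3')               -> (3,)
#   make_label_sortable('iv', roman=True)  -> (4,)
#   make_label_sortable('30(a)')           -> (30, 'a')
#   make_label_sortable('45Ai33b')         -> (45, 'A', 'i', 33, 'b')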
def make_root_sortable(label, node_type):
    """ Child nodes of the root contain nodes of various types; these
    need to be sorted correctly. This returns a tuple to help
sort these first level nodes. """
if node_type == Node.SUBPART or node_type == Node.EMPTYPART:
return (0, label[-1])
elif node_type == Node.APPENDIX:
return (1, label[-1])
elif node_type == Node.INTERP:
return (2,)
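# For example (illustrative): a subpart labelled ['1111', 'Subpart', 'A'] sorts
# as (0, 'A'), an appendix ['1111', 'A'] as (1, 'A'), and the interpretations
# node as (2,), keeping subparts before appendices before interpretations.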
def replace_first_sentence(text, replacement):
""" Replace the first sentence in text with replacement. This makes
some incredibly simplifying assumptions - so buyer beware. """
no_periods_replacement = replacement.replace('.', '')
sentences = text.split('.', 1)
if len(sentences) > 1:
sentences[0] = no_periods_replacement
return '.'.join(sentences)
else:
return replacement
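# For example (illustrative): replace_first_sentence('Old heading. Body text.',
# 'New heading.') returns 'New heading. Body text.' -- the replacement's own
# periods are stripped before it is spliced in.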
def overwrite_marker(origin, new_label):
""" The node passed in has a label, but we're going to give it a
new one (new_label). This is necessary during node moves. """
if origin.node_type == Node.REGTEXT:
marker_list = tree_utils.get_paragraph_markers(origin.text)
if len(marker_list) > 0:
marker = '(%s)' % marker_list[0]
new_marker = '(%s)' % new_label
origin.text = origin.text.replace(marker, new_marker, 1)
elif origin.node_type == Node.INTERP:
marker = interpretations.get_first_interp_marker(origin.text)
marker = marker + '.'
new_marker = new_label + '.'
origin.text = origin.text.replace(marker, new_marker, 1)
return origin
def is_reserved_node(node):
""" Return true if the node is reserved. """
reserved_title = node.title and '[Reserved]' in node.title
reserved_text = node.text and '[Reserved]' in node.text
return (reserved_title or reserved_text)
def is_interp_placeholder(node):
"""Interpretations may have nodes that exist purely to enforce
structure. Knowing if a node is such a placeholder makes it easier to
know if a POST should really just modify the existing placeholder."""
return (Node.INTERP_MARK in node.label
and not node.text and not node.title)
class RegulationTree(object):
""" This encapsulates a regulation tree, and methods to change that tree.
"""
def __init__(self, previous_tree):
self.tree = copy.deepcopy(previous_tree)
self._kept__by_parent = defaultdict(list)
def keep(self, labels):
"""The 'KEEP' verb tells us that a node should not be removed
        (generally because it would have been, had we dropped the children of
        its parent). "Keeping" those nodes makes sure they do not disappear
        when editing their parent."""
for label in labels:
node = self.find_node(label)
parent_label = get_parent_label(node)
self._kept__by_parent[parent_label].append(node)
def get_parent(self, node):
""" Get the parent of a node. Returns None if parent not found. """
parent_label_id = get_parent_label(node)
return find(self.tree, parent_label_id)
def add_to_root(self, node):
""" Add a child to the root of the tree. """
self.tree.children.append(node)
for c in self.tree.children:
c.sortable = make_root_sortable(c.label, c.node_type)
self.tree.children.sort(key=lambda x: x.sortable)
for c in self.tree.children:
del c.sortable
def add_child(self, children, node, order=None):
""" Add a child to the children, and sort appropriately. This is used
for non-root nodes. """
children = children + [node] # non-destructive
if order and set(order) == set(c.label_id() for c in children):
lookup = {}
for c in children:
lookup[c.label_id()] = c
return [lookup[label_id] for label_id in order]
else:
sort_order = []
for c in children:
if c.label[-1] == Node.INTERP_MARK:
sort_order.append((2,) + make_label_sortable(
c.label[-2], roman=(len(c.label) == 6)))
elif Node.INTERP_MARK in c.label:
marker_idx = c.label.index(Node.INTERP_MARK)
comment_pars = c.label[marker_idx + 1:]
sort_order.append((1,) + make_label_sortable(
comment_pars[-1], roman=(len(comment_pars) == 2)))
elif c.node_type == Node.APPENDIX:
sort_order.append(make_label_sortable(c.label[-1], False))
else:
sort_order.append(make_label_sortable(
c.label[-1], roman=(len(c.label) == 5)))
new_el_sort = sort_order[-1]
sort_order = sort_order[:-1]
# Use bisect so the whole list isn't resorted (the original list
# may not be strictly sorted)
insert_idx = bisect(sort_order, new_el_sort)
return children[:insert_idx] + [node] + children[insert_idx:-1]
    def delete_from_parent(self, node):
        """ Delete node from its parent, effectively removing it from the
tree. """
parent = self.get_parent(node)
other_children = [c for c in parent.children if c.label != node.label]
parent.children = other_children
def delete(self, label_id):
""" Delete the node with label_id from the tree. """
node = find(self.tree, label_id)
if node is None:
logging.warning("Attempting to delete %s failed", label_id)
else:
self.delete_from_parent(node)
def reserve(self, label_id, node):
""" Reserve either an existing node (by replacing it) or
reserve by adding a new node. When a node is reserved, it's
represented in the FR XML. We simply use that representation here
instead of doing something else. """
existing_node = find(self.tree, label_id)
if existing_node is None:
self.add_node(node)
else:
self.replace_node_and_subtree(node)
def move(self, origin, destination):
""" Move a node from one part in the tree to another. """
origin = find(self.tree, origin)
self.delete_from_parent(origin)
origin = overwrite_marker(origin, destination[-1])
origin.label = destination
self.add_node(origin)
def get_section_parent(self, node):
""" If we're trying to get the parent of an existing section, it
might be part of a subpart. So, let's find the correct subpart. """
subpart = self.get_subpart_for_node(node.label_id())
if subpart is not None:
return subpart
else:
return self.get_parent(node)
def replace_node_and_subtree(self, node):
""" Replace an existing node in the tree with node. """
if len(node.label) == 2 and node.node_type == Node.REGTEXT:
parent = self.get_section_parent(node)
else:
parent = self.get_parent(node)
prev_idx = [idx for idx, c in enumerate(parent.children)
if c.label == node.label]
if prev_idx:
# replace existing element in place
prev_idx = prev_idx[0]
parent.children = (parent.children[:prev_idx] + [node] +
parent.children[prev_idx + 1:])
else:
# actually adding a new element
parent.children = self.add_child(parent.children, node,
getattr(parent, 'child_labels',
[]))
# Finally, we see if this node is the parent of any 'kept' children.
# If so, add them back
label_id = node.label_id()
if label_id in self._kept__by_parent:
for kept in self._kept__by_parent[label_id]:
node.children = self.add_child(node.children, kept,
getattr(node, 'child_labels',
[]))
def create_empty_node(self, node_label):
""" In rare cases, we need to flush out the tree by adding
an empty node. Returns the created node"""
node_label = node_label.split('-')
if Node.INTERP_MARK in node_label:
node_type = Node.INTERP
elif len(node_label) > 1 and not node_label[1].isdigit():
node_type = Node.APPENDIX
else:
node_type = Node.REGTEXT
node = Node(label=node_label, node_type=node_type)
parent = self.get_parent(node)
if not parent:
parent = self.create_empty_node(get_parent_label(node))
parent.children = self.add_child(parent.children, node,
getattr(parent, 'child_labels', []))
return node
def contains(self, label):
"""Is this label already in the tree? label can be a list or a
string"""
return bool(self.find_node(label))
def find_node(self, label):
if isinstance(label, list):
label = '-'.join(label)
return find(self.tree, label)
def add_node(self, node):
""" Add an entirely new node to the regulation tree. """
existing = find(self.tree, node.label_id())
if existing and is_reserved_node(existing):
logging.warning('Replacing reserved node: %s' % node.label_id())
return self.replace_node_and_subtree(node)
elif existing and is_interp_placeholder(existing):
existing.title = node.title
existing.text = node.text
if hasattr(node, 'tagged_text'):
existing.tagged_text = node.tagged_text
# Unfortunately, the same nodes (particularly headers) might be
# added by multiple notices...
elif (existing and existing.text == node.text
and existing.title == node.title
and getattr(existing, 'tagged_text', '') == getattr(
node, 'tagged_text', '')):
pass
else:
if existing:
logging.warning(
'Adding a node that already exists: %s' % node.label_id())
print '%s %s' % (existing.text, node.label)
print '----'
if ((node.node_type == Node.APPENDIX and len(node.label) == 2)
or node.node_type == Node.SUBPART):
return self.add_to_root(node)
else:
parent = self.get_parent(node)
if parent is None:
# This is a corner case, where we're trying to add a child
# to a parent that should exist.
logging.warning('No existing parent for: %s' %
node.label_id())
parent = self.create_empty_node(get_parent_label(node))
# Fix the case where the node with label "<PART>-Subpart" is
# the correct parent.
if (parent.children
and parent.children[0].node_type == Node.EMPTYPART):
parent = parent.children[0]
parent.children = self.add_child(
parent.children, node, getattr(parent, 'child_labels',
[]))
def add_section(self, node, subpart_label):
""" Add a new section to a subpart. """
subpart = find(self.tree, '-'.join(subpart_label))
subpart.children = self.add_child(subpart.children, node)
def replace_node_text(self, label, change):
""" Replace just a node's text. """
node = find(self.tree, label)
node.text = change['node']['text']
def replace_node_title(self, label, change):
""" Replace just a node's title. """
node = find(self.tree, label)
node.title = change['node']['title']
    def replace_node_heading(self, label, change):
        """ A node's heading is its keyterm. We handle this here, but not
well, I think. """
node = find(self.tree, label)
node.text = replace_first_sentence(node.text, change['node']['text'])
if hasattr(node, 'tagged_text') and 'tagged_text' in change['node']:
node.tagged_text = replace_first_sentence(
node.tagged_text, change['node']['tagged_text'])
def get_subparts(self):
""" Get all the subparts and empty parts in the tree. """
def subpart_type(c):
""" Return True if a subpart or an empty part. """
return c.node_type in (Node.EMPTYPART, Node.SUBPART)
return [c for c in self.tree.children if subpart_type(c)]
def create_new_subpart(self, subpart_label):
""" Create a whole new subpart. """
# XXX Subparts need titles. We'll need to pull this up from parsing.
subpart_node = Node('', [], subpart_label, None, Node.SUBPART)
self.add_to_root(subpart_node)
return subpart_node
def get_subpart_for_node(self, label_id):
""" Return the subpart a node resides in. Note that this can't be
determined by simply looking at a node's label. """
subparts = self.get_subparts()
subparts_with_label = [s for s in subparts
if find(s, label_id) is not None]
if len(subparts_with_label) > 0:
return subparts_with_label[0]
def move_to_subpart(self, label, subpart_label):
""" Move an existing node to another subpart. If the new subpart
doesn't exist, create it. """
destination = find(self.tree, '-'.join(subpart_label))
if destination is None:
destination = self.create_new_subpart(subpart_label)
subpart_with_node = self.get_subpart_for_node(label)
if destination and subpart_with_node:
node = find(subpart_with_node, label)
other_children = [c for c in subpart_with_node.children
if c.label_id() != label]
subpart_with_node.children = other_children
destination.children = self.add_child(destination.children, node)
if not subpart_with_node.children:
self.delete('-'.join(subpart_with_node.label))
def dict_to_node(node_dict):
""" Convert a dictionary representation of a node into a Node object if
it contains the minimum required fields. Otherwise, pass it through
unchanged. """
minimum_fields = set(('text', 'label', 'node_type'))
if minimum_fields.issubset(node_dict.keys()):
node = Node(
node_dict['text'], [], node_dict['label'],
node_dict.get('title', None), node_dict['node_type'])
if 'tagged_text' in node_dict:
node.tagged_text = node_dict['tagged_text']
if 'child_labels' in node_dict:
node.child_labels = node_dict['child_labels']
return node
else:
return node_dict
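# For example (illustrative): a change payload such as
#   {'text': 'Some text.', 'label': ['1111', '2'], 'node_type': Node.REGTEXT}
# is converted into a Node, while a partial dict like {'text': 'orphan'} is
# returned unchanged.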
def sort_labels(labels):
""" Deal with higher up elements first. """
sorted_labels = sorted(labels, key=lambda x: len(x))
# The length of a Subpart label doesn't indicate it's level in the tree
subparts = [l for l in sorted_labels if 'Subpart' in l]
non_subparts = [l for l in sorted_labels if 'Subpart' not in l]
return subparts + non_subparts
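# For example (illustrative): ['1111-2', '1111-Subpart-A', '1111'] is returned
# as ['1111-Subpart-A', '1111', '1111-2'] -- shortest labels first, with any
# subpart labels moved to the front.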
def replace_node_field(reg, label, change):
""" Call one of the field appropriate methods if we're changing just
a field on a node. """
if change['action'] == 'PUT' and change['field'] == '[text]':
reg.replace_node_text(label, change)
elif change['action'] == 'PUT' and change['field'] == '[title]':
reg.replace_node_title(label, change)
elif change['action'] == 'PUT' and change['field'] == '[heading]':
reg.replace_node_heading(label, change)
def one_change(reg, label, change):
"""Notices are generally composed of many changes; this method handles a
single change to the tree."""
field_list = ['[text]', '[title]', '[heading]']
replace_subtree = 'field' not in change
if change['action'] == 'PUT' and replace_subtree:
node = dict_to_node(change['node'])
reg.replace_node_and_subtree(node)
elif change['action'] == 'PUT' and change['field'] in field_list:
replace_node_field(reg, label, change)
elif change['action'] == 'POST':
node = dict_to_node(change['node'])
if 'subpart' in change and len(node.label) == 2:
reg.add_section(node, change['subpart'])
else:
reg.add_node(node)
elif change['action'] == 'DESIGNATE':
if 'Subpart' in change['destination']:
reg.move_to_subpart(label, change['destination'])
elif change['action'] == 'MOVE':
reg.move(label, change['destination'])
elif change['action'] == 'DELETE':
reg.delete(label)
elif change['action'] == 'RESERVE':
node = dict_to_node(change['node'])
reg.reserve(label, node)
else:
print "%s: %s" % (change['action'], label)
def _needs_delay(reg, change):
"""Determine whether we should delay processing this change. This will
be used in a second pass when compiling the reg"""
action = change['action']
if action == 'MOVE':
return reg.contains(change['destination'])
if action == 'POST':
existing = reg.find_node(change['node']['label'])
return existing and not is_reserved_node(existing)
return False
def compile_regulation(previous_tree, notice_changes):
""" Given a last full regulation tree, and the set of changes from the
next final notice, construct the next full regulation tree. """
reg = RegulationTree(previous_tree)
labels = sort_labels(notice_changes.keys())
reg_part = previous_tree.label[0]
labels = filter(lambda l: l.split('-')[0] == reg_part, labels)
next_pass = [(label, change)
for label in labels
for change in notice_changes[label]]
pass_len = len(next_pass) + 1
reg.keep(l for l, change in next_pass if change['action'] == Verb.KEEP)
next_pass = [pair for pair in next_pass if pair[1]['action'] != Verb.KEEP]
# Monotonically decreasing length - guarantees we'll end
while pass_len > len(next_pass):
pass_len = len(next_pass)
current_pass, next_pass = next_pass, []
for label, change in current_pass:
if _needs_delay(reg, change):
next_pass.append((label, change))
else:
one_change(reg, label, change)
# Force any remaining changes -- generally means something went wrong
for label, change in next_pass:
logging.warning('Conflicting Change: %s:%s', label, change['action'])
one_change(reg, label, change)
return reg.tree
| EricSchles/regulations-parser | regparser/notice/compiler.py | Python | cc0-1.0 | 21,565 | 0.000139 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014 Mick Krippendorf <m.krippendorf@freenet.de>
__version__ = '0.2.5a'
__date__ = '2014-09-27'
__author__ = 'Mick Krippendorf <m.krippendorf@freenet.de>'
__license__ = 'MIT'
import pprint
from hornet import *
from hornet.symbols import (
A, Adj, B, C, Case, D, Det, E, F, Gender, L, NP, Noun, Number, Rest, S, T,
Trans, VP, Verb, W, X, Y, Z, accusative, adj, dative, det, feminine,
genitive, intransitive, masculine, neuter, nominative, noun, noun_unknown,
np, np_unknown, plural, s, singular, transitive, verb, vp,
)
def grammar(db):
db.tell(
s(S) << s(S, []),
s >>
np(Number, nominative) &
vp(Number, nominative, intransitive),
s >>
np(Number, Case) &
vp(Number, Case, transitive),
np(plural, Case) >>
noun(_, plural, Case),
np(Number, Case) >>
det(Gender, Number, Case) &
noun(Gender, Number, Case),
vp(Number, nominative, intransitive) >>
verb(Number, nominative, intransitive),
vp(Number, accusative, intransitive) >>
verb(Number, accusative, intransitive),
vp(_, dative, transitive) >>
verb(Number, nominative, transitive) &
np(Number, nominative),
vp(Number, nominative, transitive) >>
verb(Number, nominative, transitive) &
np(_, dative),
vp(Number, nominative, transitive) >>
verb(Number, accusative, transitive) &
np(_, accusative),
det(masculine, singular, nominative) >> ['der'],
det(masculine, singular, genitive) >> ['des'],
det(masculine, singular, dative) >> ['dem'],
det(masculine, singular, accusative) >> ['den'],
det(masculine, plural, nominative) >> ['die'],
det(masculine, plural, genitive) >> ['der'],
det(masculine, plural, dative) >> ['den'],
det(masculine, plural, accusative) >> ['die'],
det(feminine, singular, nominative) >> ['die'],
det(feminine, singular, genitive) >> ['der'],
det(feminine, singular, dative) >> ['der'],
det(feminine, singular, accusative) >> ['die'],
det(feminine, plural, nominative) >> ['die'],
det(feminine, plural, genitive) >> ['der'],
det(feminine, plural, dative) >> ['den'],
det(feminine, plural, accusative) >> ['die'],
det(neuter, singular, nominative) >> ['das'],
det(neuter, singular, genitive) >> ['des'],
det(neuter, singular, dative) >> ['dem'],
det(neuter, singular, accusative) >> ['das'],
det(neuter, plural, nominative) >> ['die'],
det(neuter, plural, genitive) >> ['der'],
det(neuter, plural, dative) >> ['den'],
det(neuter, plural, accusative) >> ['die'],
det(masculine, singular, nominative) >> ['ein'],
det(masculine, singular, genitive) >> ['eines'],
det(masculine, singular, dative) >> ['einem'],
det(masculine, singular, accusative) >> ['einen'],
det(feminine, singular, nominative) >> ['eine'],
det(feminine, singular, genitive) >> ['einer'],
det(feminine, singular, dative) >> ['einer'],
det(feminine, singular, accusative) >> ['eine'],
det(_, plural, nominative) >> ['einige'],
det(_, plural, genitive) >> ['einiger'],
det(_, plural, dative) >> ['einigen'],
det(_, plural, accusative) >> ['einige'],
det(_, plural, nominative) >> ['viele'],
det(_, plural, genitive) >> ['vieler'],
det(_, plural, dative) >> ['vielen'],
det(_, plural, accusative) >> ['viele'],
det(_, plural, nominative) >> ['alle'],
det(_, plural, genitive) >> ['aller'],
det(_, plural, dative) >> ['allen'],
det(_, plural, accusative) >> ['alle'],
det(masculine, singular, nominative) >> ['kein'],
det(masculine, singular, genitive) >> ['keines'],
det(masculine, singular, dative) >> ['keinem'],
det(masculine, singular, accusative) >> ['keinen'],
det(masculine, plural, nominative) >> ['keine'],
det(masculine, plural, genitive) >> ['keiner'],
det(masculine, plural, dative) >> ['keinen'],
det(masculine, plural, accusative) >> ['keine'],
det(feminine, singular, nominative) >> ['keine'],
det(feminine, singular, genitive) >> ['keiner'],
det(feminine, singular, dative) >> ['keiner'],
det(feminine, singular, accusative) >> ['keine'],
det(feminine, plural, nominative) >> ['keine'],
det(feminine, plural, genitive) >> ['keiner'],
det(feminine, plural, dative) >> ['keinen'],
det(feminine, plural, accusative) >> ['keine'],
det(masculine, singular, nominative) >> ['mancher'],
det(masculine, singular, genitive) >> ['manches'],
det(masculine, singular, dative) >> ['manchem'],
det(masculine, singular, accusative) >> ['manchen'],
det(masculine, plural, nominative) >> ['manche'],
det(masculine, plural, genitive) >> ['mancher'],
det(masculine, plural, dative) >> ['manchen'],
det(masculine, plural, accusative) >> ['manchen'],
det(feminine, singular, nominative) >> ['manche'],
det(feminine, singular, genitive) >> ['mancher'],
det(feminine, singular, dative) >> ['mancher'],
det(feminine, singular, accusative) >> ['manche'],
det(feminine, plural, nominative) >> ['manche'],
det(feminine, plural, genitive) >> ['mancher'],
det(feminine, plural, dative) >> ['manchen'],
det(feminine, plural, accusative) >> ['manche'],
det(masculine, singular, nominative) >> ['jeder'],
det(masculine, singular, genitive) >> ['jedes'],
det(masculine, singular, dative) >> ['jedem'],
det(masculine, singular, accusative) >> ['jeden'],
det(feminine, singular, nominative) >> ['jede'],
det(feminine, singular, genitive) >> ['jeder'],
det(feminine, singular, dative) >> ['jeder'],
det(feminine, singular, accusative) >> ['jede'],
noun(masculine, singular, nominative) >> ['hund'],
noun(masculine, singular, genitive) >> ['hundes'],
noun(masculine, singular, dative) >> ['hund'],
noun(masculine, singular, accusative) >> ['hund'],
noun(masculine, plural, nominative) >> ['hunde'],
noun(masculine, plural, genitive) >> ['hunde'],
noun(masculine, plural, dative) >> ['hunden'],
noun(masculine, plural, accusative) >> ['hunde'],
noun(feminine, singular, nominative) >> ['katze'],
noun(feminine, singular, genitive) >> ['katze'],
noun(feminine, singular, dative) >> ['katze'],
noun(feminine, singular, accusative) >> ['katze'],
noun(feminine, plural, nominative) >> ['katzen'],
noun(feminine, plural, genitive) >> ['katzen'],
noun(feminine, plural, dative) >> ['katzen'],
noun(feminine, plural, accusative) >> ['katzen'],
noun(masculine, singular, nominative) >> ['kater'],
noun(masculine, singular, genitive) >> ['katers'],
noun(masculine, singular, dative) >> ['kater'],
noun(masculine, singular, accusative) >> ['kater'],
noun(masculine, plural, nominative) >> ['kater'],
noun(masculine, plural, genitive) >> ['kater'],
noun(masculine, plural, dative) >> ['katern'],
noun(masculine, plural, accusative) >> ['kater'],
noun(feminine, singular, nominative) >> ['maus'],
noun(feminine, singular, genitive) >> ['maus'],
noun(feminine, singular, dative) >> ['maus'],
noun(feminine, singular, accusative) >> ['maus'],
noun(feminine, plural, nominative) >> ['maeuse'],
noun(feminine, plural, genitive) >> ['maeuse'],
noun(feminine, plural, dative) >> ['maeusen'],
noun(feminine, plural, accusative) >> ['maeuse'],
noun(neuter, plural, nominative) >> ['leute'],
noun(neuter, plural, genitive) >> ['leute'],
noun(neuter, plural, dative) >> ['leuten'],
noun(neuter, plural, accusative) >> ['leute'],
verb(singular, nominative, Trans) >> ['fehlt'],
verb(plural, nominative, Trans) >> ['fehlen'],
verb(singular, dative, transitive) >> ['fehlt'],
verb(plural, dative, transitive) >> ['fehlen'],
verb(singular, _, intransitive) >> ['schlaeft'],
verb(plural, _, intransitive) >> ['schlafen'],
verb(singular, nominative, intransitive) >> ['frisst'],
verb(plural, nominative, intransitive) >> ['fressen'],
verb(singular, accusative, transitive) >> ['frisst'],
verb(plural, accusative, transitive) >> ['fressen'],
verb(singular, nominative, intransitive) >> ['jagt'],
verb(plural, nominative, intransitive) >> ['jagen'],
verb(singular, accusative, transitive) >> ['jagt'],
verb(plural, accusative, transitive) >> ['jagen'],
verb(singular, _, intransitive) >> ['spielt'],
verb(plural, _, intransitive) >> ['spielen'],
)
#for subst in db.ask(s(A) & member('jagen', A)):
#words = [B, 'hunde', 'jagen', C, 'katzen']
#words = ['manche', 'maeuse', 'jagen' | B]
#words = [D, 'kater', 'jagen' | B]
# words = 'manche maeuse jagen viele katze'.split()
# words = 'eine maus jagt viele katzen'.split()
# words = [B, C, 'jagen']
# words = ['manche', B, C]
# words = ['der', C, D, 'die', F]
words = [B, 'hund', D, E, F]
#words = [B, C, 'jagt', D, E]
# words = [A, 'jagen' | E]
#for i, subst in enumerate(db.ask(s(W) & join(W, S, ' '))):
for subst in db.ask(equal(words, W) & s(W) & join(W, S, ' ')):
print(subst[S]())
#print('Yes.')
#else:
#print('No.')
#print(repr(subst[S]))
#print(i)
def grammar2(db):
db.tell(
s(S, T) << s(T, S, []),
s(s(NP, VP)) >>
np(NP, Number, nominative) &
vp(VP, Number, nominative, intransitive),
np(np(Det, Noun, [Gender, Number]), Number, Case) >>
det(Det, Gender, Number, Case) &
noun(Noun, Gender, Number, Case),
np(np(Det, Adj, Noun, [Gender, Number]), Number, Case) >>
det(Det, Gender, Number, Case) &
adj(Adj, Gender, Number, Case) &
noun(Noun, Gender, Number, Case),
vp(vp(Verb, NP), Number, nominative, intransitive) >>
verb(Verb, Number, nominative, intransitive) &
np(NP, Number, nominative),
det(det('der'), masculine, singular, nominative) >> ['der'],
det(det('die'), feminine, singular, nominative) >> ['die'],
det(det('das'), neuter, singular, nominative) >> ['das'],
det(det('ein'), masculine, singular, nominative) >> ['ein'],
det(det('eine'), feminine, singular, nominative) >> ['eine'],
det(det('kein'), masculine, singular, nominative) >> ['kein'],
det(det('keine'), feminine, singular, nominative) >> ['keine'],
det(det('jeder'), masculine, singular, nominative) >> ['jeder'],
det(det('jede'), feminine, singular, nominative) >> ['jede'],
adj(adj('betretbarer'), masculine, singular, nominative) >>
['betretbarer'],
#noun(noun('raum'), masculine, singular, nominative) >>
#['raum'] & {cut},
verb(verb('ist'), singular, nominative, intransitive) >>
['ist'] & {cut},
)
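    # The block below is sample (German) input for this grammar; rough English
    # gloss: "The cabin is a room. 'Cabins aboard a spaceship...' The bathroom
    # is east of the cabin. The description is 'Like a cabin, the bathroom
    # too...' The brochure is in the cabin. 'It describes the glory...' The bed
    # is in the cabin. The bed is an enterable room. Treat 'furniture' as the
    # bed. The mirror is scenery in the bathroom. The shower is here. It is
    # immovable."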
"""
Die Kabine ist ein Raum. "Kabinen an Bord eines Raumschiffs..."
Das Bad ist östlich von der Kabine. Die Beschreibung ist "Wie eine Kabine, ist auch das Bad..."
Die Broschüre ist in der Kabine. "Sie beschreibt die Herrlichkeit..."
Das Bett ist in der Kabine.
Das Bett ist ein betretbarer Raum.
Setze "Möbel" mit Bett gleich.
Der Spiegel ist Kulisse im Bad.
Die Dusche ist hier. Sie ist unbeweglich.
"""
def mudlang2(db):
nouns = {}
def test(term, env, db, trail):
nouns[env.X.name] = dict(
gender=str(env.Gender),
number=str(env.Number),
case=str(env.Case),
)
grammar2(db)
db.tell(
#noun(noun(X), Gender, Number, Case, [X|Y], Y)
noun(noun(X), Gender, Number, Case, [X|Y], Y)[test],
)
L = 'die kabine ist ein betretbarer raum'.split()
for subst in db.ask(equal(L, S) & s(S, T)):
print(subst[S])
print(subst[T])
pprint.pprint(nouns)
db = Database()
grammar(db)
mudlang2(db)
| pillmuncher/hornet | src/examples/parsing.py | Python | mit | 12,639 | 0.003561 |
try:
import json
except ImportError:
import simplejson as json
from UserList import UserList
class Resource(object):
"""Object wrapper for resources.
Provides an object interface to resources returned by the Soundcloud API.
"""
def __init__(self, obj):
self.obj = obj
def __getstate__(self):
return self.obj.items()
def __setstate__(self, items):
if not hasattr(self, 'obj'):
self.obj = {}
for key, val in items:
self.obj[key] = val
def __getattr__(self, name):
if name in self.obj:
return self.obj.get(name)
raise AttributeError
def fields(self):
return self.obj
def keys(self):
return self.obj.keys()
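# Illustrative usage (hypothetical data, not from this module):
#   track = Resource({'id': 123, 'title': 'Foo'})
#   track.title            # -> 'Foo'
#   track.fields()         # -> {'id': 123, 'title': 'Foo'}
#   track.missing_field    # -> raises AttributeError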
class ResourceList(UserList):
"""Object wrapper for lists of resources."""
def __init__(self, resources=[]):
data = [Resource(resource) for resource in resources]
super(ResourceList, self).__init__(data)
def wrapped_resource(response):
"""Return a response wrapped in the appropriate wrapper type.
Lists will be returned as a ```ResourceList``` instance,
dicts will be returned as a ```Resource``` instance.
"""
try:
content = json.loads(response.content)
except ValueError:
# not JSON
content = response.content
if isinstance(content, list):
result = ResourceList(content)
else:
result = Resource(content)
result.raw_data = response.content
for attr in ['url', 'status_code', 'error']:
setattr(result, attr, getattr(response, attr))
return result
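# Illustrative behaviour (hypothetical response bodies, not from this module):
#   body '{"id": 1}'        -> Resource with .id == 1
#   body '[{"id": 1}]'      -> ResourceList containing one Resource
#   body 'not json at all'  -> Resource wrapping the raw string
# url, status_code and error are copied onto the result in every case.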
| Fauxmoehawkeen/soundcloud-python-master | soundcloud/resource.py | Python | bsd-2-clause | 1,625 | 0 |
# Copyright (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Vincent Fretin, Amen Souissi
import transaction
from substanced.util import get_oid
from dace.processinstance.event import DelayedCallback
from dace.util import (
find_catalog, getAllSystemActions,
get_system_request, BaseJob)
from dace import log
last_transaction_by_machine = {}
def _call_action(action):
transaction.begin()
try:
context = action.get_potential_context()
if context is None:
return
request = get_system_request()
request.invalidate_cache = True
action.execute(context, request, {})
log.info("Execute action %s", action.title)
transaction.commit()
except Exception as e:
transaction.abort()
log.exception(e)
def _get_cache_key():
request = get_system_request()
return str(get_oid(request.user))
def run():
request = get_system_request()
if request.user is None:
# in test, db connection closed
return
catalog = find_catalog('dace')
global last_transaction
cache_key = _get_cache_key()
last_transaction = last_transaction_by_machine.setdefault(cache_key, '')
last_tid = catalog._p_jar.db().lastTransaction()
if last_transaction != last_tid:
last_transaction_by_machine[cache_key] = last_tid
transaction.begin()
try:
system_actions = [a for a in getAllSystemActions()
if getattr(a, 'process', None) or
a.isstart]
log.info("new zodb transactions, actions to check: %s",
len(system_actions))
for action in system_actions:
_call_action(action)
except Exception as e:
log.exception(e)
log.info("actions to check: done")
run_crawler()
def run_crawler():
"""Start loop."""
job = BaseJob('system')
job.callable = run
dc = DelayedCallback(job, 2000)
dc.start()
| ecreall/dace | dace/objectofcollaboration/system.py | Python | agpl-3.0 | 2,093 | 0.000478 |
#!/usr/bin/python
#
# Copyright (c) 2017 Julien Stroheker, <juliens@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_acs
version_added: "2.4"
short_description: Manage an Azure Container Service Instance (ACS).
description:
- Create, update and delete an Azure Container Service Instance.
options:
resource_group:
description:
- Name of a resource group where the Container Services exists or will be created.
required: true
name:
description:
- Name of the Container Services instance.
required: true
default: null
state:
description:
- Assert the state of the ACS. Use 'present' to create or update an ACS and 'absent' to delete it.
default: present
choices:
- absent
- present
required: false
location:
description:
- Valid azure location. Defaults to location of the resource group.
default: resource_group location
required: false
orchestration_platform:
description:
- Specifies the Container Orchestration Platform to use. Currently can be either DCOS, Kubernetes or Swarm.
required: true
master_profile:
description:
- Master profile suboptions.
required: true
default: null
suboptions:
count:
description:
- Number of masters (VMs) in the container service cluster. Allowed values are 1, 3, and 5.
required: true
choices:
- 1
- 3
- 5
dns_prefix:
description:
- The DNS Prefix to use for the Container Service master nodes.
required: true
linux_profile:
description:
- The linux profile suboptions.
required: true
default: null
suboptions:
admin_username:
description:
- The Admin Username for the Cluster.
required: true
default: azureuser
ssh_key:
description:
- The Public SSH Key used to access the cluster.
required: true
agent_pool_profiles:
description:
- The agent pool profile suboptions.
required: true
default: null
suboptions:
name:
description:
- Unique name of the agent pool profile in the context of the subscription and resource group.
required: true
count:
description:
- Number of agents (VMs) to host docker containers. Allowed values must be in the range of 1 to 100 (inclusive).
required: true
default: 1
dns_prefix:
description:
- The DNS Prefix given to Agents in this Agent Pool.
required: true
vm_size:
description:
- The VM Size of each of the Agent Pool VM's (e.g. Standard_F1 / Standard_D2v2).
required: true
default: Standard_D2v2
service_principal:
description:
- The service principal suboptions.
required: false
default: null
suboptions:
client_id:
description:
- The ID for the Service Principal.
required: false
client_secret:
description:
- The secret password associated with the service principal.
required: false
diagnostics_profile:
description:
- Should VM Diagnostics be enabled for the Container Service VM's.
required: true
default: false
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Julien Stroheker (@julienstroheker)"
'''
EXAMPLES = '''
- name: Create an azure container services instance running Kubernetes
azure_rm_acs:
name: acctestcontservice1
location: eastus
resource_group: Testing
orchestration_platform: Kubernetes
master_profile:
- count: 3
dns_prefix: acsk8smasterdns
linux_profile:
- admin_username: azureuser
ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAA...
service_principal:
- client_id: "cf72ca99-f6b9-4004-b0e0-bee10c521948"
client_secret: "mySPNp@ssw0rd!"
agent_pool_profiles:
- name: default
count: 5
dns_prefix: acsk8sagent
vm_size: Standard_D2_v2
diagnostics_profile: false
tags:
Environment: Production
- name: Create an azure container services instance running DCOS
azure_rm_acs:
name: acctestcontservice2
location: eastus
resource_group: Testing
orchestration_platform: DCOS
master_profile:
- count: 3
dns_prefix: acsdcosmasterdns
linux_profile:
- admin_username: azureuser
ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAA...
agent_pool_profiles:
- name: default
count: 5
dns_prefix: acscdcosagent
vm_size: Standard_D2_v2
diagnostics_profile: false
tags:
Environment: Production
- name: Create an azure container services instance running Swarm
azure_rm_acs:
name: acctestcontservice3
location: eastus
resource_group: Testing
orchestration_platform: Swarm
master_profile:
- count: 3
dns_prefix: acsswarmmasterdns
linux_profile:
- admin_username: azureuser
ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAA...
agent_pool_profiles:
- name: default
count: 5
dns_prefix: acsswarmagent
vm_size: Standard_D2_v2
diagnostics_profile: false
tags:
Environment: Production
# Deletes the specified container service in the specified subscription and resource group.
# The operation does not delete other resources created as part of creating a container service,
# including storage accounts, VMs, and availability sets. All the other resources created with the container
# service are part of the same resource group and can be deleted individually.
- name: Remove an azure container services instance
azure_rm_acs:
name: acctestcontservice3
location: eastus
resource_group: Testing
state: absent
orchestration_platform: Swarm
master_profile:
- count: 1
dns_prefix: acstestingmasterdns5
linux_profile:
- admin_username: azureuser
ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAA...
service_principal:
- client_id: 7fb4173c-3ca3-4d5b-87f8-1daac941207a
client_secret: MPNSuM1auUuITefiLGBrpZZnLMDKBLw2
agent_pool_profiles:
- name: default
count: 4
dns_prefix: acctestagent15
vm_size: Standard_A0
diagnostics_profile: false
tags:
Ansible: azure_rm_acs
'''
RETURN = '''
state:
description: Current state of the azure container service
returned: always
type: dict
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from azure.mgmt.containerservice.models import (
ContainerService, ContainerServiceOrchestratorProfile, ContainerServiceCustomProfile,
ContainerServiceServicePrincipalProfile, ContainerServiceMasterProfile,
ContainerServiceAgentPoolProfile, ContainerServiceWindowsProfile,
ContainerServiceLinuxProfile, ContainerServiceSshConfiguration,
ContainerServiceDiagnosticsProfile, ContainerServiceSshPublicKey,
ContainerServiceVMDiagnostics
)
except ImportError:
# This is handled in azure_rm_common
pass
def create_agent_pool_profile_instance(agentpoolprofile):
'''
Helper method to serialize a dict to a ContainerServiceAgentPoolProfile
:param: agentpoolprofile: dict with the parameters to setup the ContainerServiceAgentPoolProfile
:return: ContainerServiceAgentPoolProfile
'''
return ContainerServiceAgentPoolProfile(
name=agentpoolprofile['name'],
count=agentpoolprofile['count'],
dns_prefix=agentpoolprofile['dns_prefix'],
vm_size=agentpoolprofile['vm_size']
)
def create_orch_platform_instance(orchestrator):
'''
Helper method to serialize a dict to a ContainerServiceOrchestratorProfile
:param: orchestrator: dict with the parameters to setup the ContainerServiceOrchestratorProfile
:return: ContainerServiceOrchestratorProfile
'''
return ContainerServiceOrchestratorProfile(
orchestrator_type=orchestrator,
)
def create_service_principal_profile_instance(spnprofile):
'''
Helper method to serialize a dict to a ContainerServiceServicePrincipalProfile
:param: spnprofile: dict with the parameters to setup the ContainerServiceServicePrincipalProfile
:return: ContainerServiceServicePrincipalProfile
'''
return ContainerServiceServicePrincipalProfile(
client_id=spnprofile[0]['client_id'],
secret=spnprofile[0]['client_secret']
)
def create_linux_profile_instance(linuxprofile):
'''
Helper method to serialize a dict to a ContainerServiceLinuxProfile
:param: linuxprofile: dict with the parameters to setup the ContainerServiceLinuxProfile
:return: ContainerServiceLinuxProfile
'''
return ContainerServiceLinuxProfile(
admin_username=linuxprofile[0]['admin_username'],
ssh=create_ssh_configuration_instance(linuxprofile[0]['ssh_key'])
)
def create_ssh_configuration_instance(sshconf):
'''
Helper method to serialize a dict to a ContainerServiceSshConfiguration
:param: sshconf: dict with the parameters to setup the ContainerServiceSshConfiguration
:return: ContainerServiceSshConfiguration
'''
listssh = []
key = ContainerServiceSshPublicKey(key_data=str(sshconf))
listssh.append(key)
return ContainerServiceSshConfiguration(
public_keys=listssh
)
def create_master_profile_instance(masterprofile):
'''
Helper method to serialize a dict to a ContainerServiceMasterProfile
:param: masterprofile: dict with the parameters to setup the ContainerServiceMasterProfile
:return: ContainerServiceMasterProfile
'''
return ContainerServiceMasterProfile(
count=masterprofile[0]['count'],
dns_prefix=masterprofile[0]['dns_prefix']
)
def create_diagnostics_profile_instance(diagprofile):
'''
Helper method to serialize a dict to a ContainerServiceDiagnosticsProfile
:param: diagprofile: dict with the parameters to setup the ContainerServiceDiagnosticsProfile
:return: ContainerServiceDiagnosticsProfile
'''
return ContainerServiceDiagnosticsProfile(
vm_diagnostics=create_vm_diagnostics_instance(diagprofile)
)
def create_vm_diagnostics_instance(vmdiag):
'''
Helper method to serialize a dict to a ContainerServiceVMDiagnostics
:param: vmdiag: dict with the parameters to setup the ContainerServiceVMDiagnostics
:return: ContainerServiceVMDiagnostics
'''
return ContainerServiceVMDiagnostics(
enabled=vmdiag
)
def create_acs_dict(acs):
'''
Helper method to deserialize a ContainerService to a dict
:param: acs: ContainerService or AzureOperationPoller with the Azure callback object
:return: dict with the state on Azure
'''
results = dict(
id=acs.id,
name=acs.name,
location=acs.location,
tags=acs.tags,
orchestrator_profile=create_orchestrator_profile_dict(acs.orchestrator_profile),
master_profile=create_master_profile_dict(acs.master_profile),
linux_profile=create_linux_profile_dict(acs.linux_profile),
service_principal_profile=acs.service_principal_profile,
        diagnostics_profile=create_diagnostics_profile_dict(acs.diagnostics_profile),
provisioning_state=acs.provisioning_state,
agent_pool_profiles=create_agent_pool_profiles_dict(acs.agent_pool_profiles),
type=acs.type
)
return results
def create_linux_profile_dict(linuxprofile):
'''
Helper method to deserialize a ContainerServiceLinuxProfile to a dict
:param: linuxprofile: ContainerServiceLinuxProfile with the Azure callback object
:return: dict with the state on Azure
'''
results = dict(
ssh_key=linuxprofile.ssh.public_keys[0].key_data,
admin_username=linuxprofile.admin_username
)
return results
def create_master_profile_dict(masterprofile):
'''
Helper method to deserialize a ContainerServiceMasterProfile to a dict
:param: masterprofile: ContainerServiceMasterProfile with the Azure callback object
:return: dict with the state on Azure
'''
results = dict(
count=masterprofile.count,
fqdn=masterprofile.fqdn,
dns_prefix=masterprofile.dns_prefix
)
return results
def create_diagnostics_profile_dict(diagnosticsprofile):
    '''
    Helper method to deserialize a ContainerServiceDiagnosticsProfile to a dict
    :param: diagnosticsprofile: ContainerServiceDiagnosticsProfile with the Azure callback object
:return: dict with the state on Azure
'''
results = dict(
vm_diagnostics=diagnosticsprofile.vm_diagnostics.enabled
)
return results
def create_orchestrator_profile_dict(orchestratorprofile):
'''
Helper method to deserialize a ContainerServiceOrchestratorProfile to a dict
:param: orchestratorprofile: ContainerServiceOrchestratorProfile with the Azure callback object
:return: dict with the state on Azure
'''
results = dict(
orchestrator_type=str(orchestratorprofile.orchestrator_type)
)
return results
def create_agent_pool_profiles_dict(agentpoolprofiles):
'''
Helper method to deserialize a ContainerServiceAgentPoolProfile to a dict
:param: agentpoolprofiles: ContainerServiceAgentPoolProfile with the Azure callback object
:return: dict with the state on Azure
'''
results = []
for profile in agentpoolprofiles:
result = dict(
count=profile.count,
vm_size=profile.vm_size,
name=profile.name,
dns_prefix=profile.dns_prefix,
fqdn=profile.fqdn
)
results.append(result)
return results
class AzureRMContainerService(AzureRMModuleBase):
"""Configuration class for an Azure RM container service resource"""
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
name=dict(
type='str',
required=True
),
state=dict(
type='str',
required=False,
default='present',
choices=['present', 'absent']
),
location=dict(
type='str',
required=False
),
orchestration_platform=dict(
type='str',
required=True,
choices=['DCOS', 'Kubernetes', 'Swarm']
),
master_profile=dict(
type='list',
required=True
),
linux_profile=dict(
type='list',
required=True
),
agent_pool_profiles=dict(
type='list',
required=True
),
service_principal=dict(
type='list',
required=False
),
diagnostics_profile=dict(
type='bool',
required=True
)
)
self.resource_group = None
self.name = None
self.location = None
self.tags = None
self.state = None
self.orchestration_platform = None
self.master_profile = None
self.linux_profile = None
self.agent_pool_profiles = None
self.service_principal = None
self.diagnostics_profile = None
self.results = dict(changed=False, state=dict())
super(AzureRMContainerService, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True,
supports_tags=True)
def exec_module(self, **kwargs):
"""Main module execution method"""
for key in list(self.module_arg_spec.keys()) + ['tags']:
setattr(self, key, kwargs[key])
resource_group = None
response = None
results = dict()
to_be_updated = False
try:
resource_group = self.get_resource_group(self.resource_group)
except CloudError:
self.fail('resource group {} not found'.format(self.resource_group))
if not self.location:
self.location = resource_group.location
        # Check if the ACS instance is already present in the resource group
if self.state == 'present':
if self.orchestration_platform == 'Kubernetes':
if not self.service_principal:
self.fail('service_principal should be specified when using Kubernetes')
if not self.service_principal[0].get('client_id'):
self.fail('service_principal.client_id should be specified when using Kubernetes')
if not self.service_principal[0].get('client_secret'):
self.fail('service_principal.client_secret should be specified when using Kubernetes')
mastercount = self.master_profile[0].get('count')
            if mastercount not in (1, 3, 5):
                self.fail('Invalid master count: {0}; it should be 1, 3 or 5'.format(mastercount))
            # For now there cannot be more than one agent pool; remove this check in the future if that changes
agentpoolcount = len(self.agent_pool_profiles)
if agentpoolcount > 1:
                self.fail('You cannot specify more than one agent_pool_profile')
response = self.get_acs()
self.results['state'] = response
if not response:
to_be_updated = True
else:
self.log('Results : {0}'.format(response))
update_tags, response['tags'] = self.update_tags(response['tags'])
if response['provisioning_state'] == "Succeeded":
if update_tags:
to_be_updated = True
# Cannot Update the master count for now // Uncomment this block in the future to support it
if response['master_profile'].get('count') != self.master_profile[0].get('count'):
# self.log(("Master Profile Count Diff, Was {0} / Now {1}"
# .format(response['master_profile'].count,
# self.master_profile[0].get('count'))))
# to_be_updated = True
self.module.warn("master_profile.count cannot be updated")
# Cannot Update the SSH Key for now // Uncomment this block in the future to support it
if response['linux_profile'].get('ssh_key') != self.linux_profile[0].get('ssh_key'):
# self.log(("Linux Profile Diff SSH, Was {0} / Now {1}"
# .format(response['linux_profile'].ssh.public_keys[0].key_data,
# self.linux_profile[0].get('ssh_key'))))
# to_be_updated = True
self.module.warn("linux_profile.ssh_key cannot be updated")
# self.log("linux_profile response : {0}".format(response['linux_profile'].get('admin_username')))
# self.log("linux_profile self : {0}".format(self.linux_profile[0].get('admin_username')))
# Cannot Update the Username for now // Uncomment this block in the future to support it
if response['linux_profile'].get('admin_username') != self.linux_profile[0].get('admin_username'):
# self.log(("Linux Profile Diff User, Was {0} / Now {1}"
# .format(response['linux_profile'].admin_username,
# self.linux_profile[0].get('admin_username'))))
# to_be_updated = True
self.module.warn("linux_profile.admin_username cannot be updated")
                    # Cannot have more than one agent pool profile for now // Uncomment this block in the future to support it
# if len(response['agent_pool_profiles']) != len(self.agent_pool_profiles):
# self.log("Agent Pool count is diff, need to updated")
# to_be_updated = True
for profile_result in response['agent_pool_profiles']:
matched = False
for profile_self in self.agent_pool_profiles:
if profile_result['name'] == profile_self['name']:
matched = True
if profile_result['count'] != profile_self['count'] or profile_result['vm_size'] != profile_self['vm_size']:
self.log(("Agent Profile Diff - Count was {0} / Now {1} - Vm_size was {2} / Now {3}"
.format(profile_result['count'], profile_self['count'],
profile_result['vm_size'], profile_self['vm_size'])))
to_be_updated = True
if not matched:
self.log("Agent Pool not found")
to_be_updated = True
if to_be_updated:
self.log("Need to Create / Update the ACS instance")
if self.check_mode:
return self.results
self.results['state'] = self.create_update_acs()
self.results['changed'] = True
self.log("Creation / Update done")
elif self.state == 'absent':
self.delete_acs()
self.log("ACS instance deleted")
return self.results
def create_update_acs(self):
'''
Creates or updates a container service with the specified configuration of orchestrator, masters, and agents.
:return: deserialized ACS instance state dictionary
'''
self.log("Creating / Updating the ACS instance {0}".format(self.name))
service_principal_profile = None
agentpools = []
if self.agent_pool_profiles:
for profile in self.agent_pool_profiles:
self.log("Trying to push the following Profile {0}".format(profile))
agentpools.append(create_agent_pool_profile_instance(profile))
if self.orchestration_platform == 'Kubernetes':
service_principal_profile = create_service_principal_profile_instance(self.service_principal)
parameters = ContainerService(
location=self.location,
tags=self.tags,
orchestrator_profile=create_orch_platform_instance(self.orchestration_platform),
service_principal_profile=service_principal_profile,
linux_profile=create_linux_profile_instance(self.linux_profile),
master_profile=create_master_profile_instance(self.master_profile),
agent_pool_profiles=agentpools,
diagnostics_profile=create_diagnostics_profile_instance(self.diagnostics_profile)
)
# self.log("orchestrator_profile : {0}".format(parameters.orchestrator_profile))
# self.log("service_principal_profile : {0}".format(parameters.service_principal_profile))
# self.log("linux_profile : {0}".format(parameters.linux_profile))
# self.log("ssh from yaml : {0}".format(results.get('linux_profile')[0]))
# self.log("ssh : {0}".format(parameters.linux_profile.ssh))
# self.log("master_profile : {0}".format(parameters.master_profile))
# self.log("agent_pool_profiles : {0}".format(parameters.agent_pool_profiles))
# self.log("vm_diagnostics : {0}".format(parameters.diagnostics_profile.vm_diagnostics))
try:
poller = self.containerservice_client.container_services.create_or_update(self.resource_group, self.name, parameters)
response = self.get_poller_result(poller)
except CloudError as exc:
self.log('Error attempting to create the ACS instance.')
self.fail("Error creating the ACS instance: {0}".format(str(exc)))
return create_acs_dict(response)
def delete_acs(self):
'''
Deletes the specified container service in the specified subscription and resource group.
The operation does not delete other resources created as part of creating a container service,
including storage accounts, VMs, and availability sets.
All the other resources created with the container service are part of the same resource group and can be deleted individually.
:return: True
'''
self.log("Deleting the ACS instance {0}".format(self.name))
try:
poller = self.containerservice_client.container_services.delete(self.resource_group, self.name)
self.get_poller_result(poller)
except CloudError as e:
self.log('Error attempting to delete the ACS instance.')
self.fail("Error deleting the ACS instance: {0}".format(str(e)))
return True
def get_acs(self):
'''
Gets the properties of the specified container service.
:return: deserialized ACS instance state dictionary
'''
self.log("Checking if the ACS instance {0} is present".format(self.name))
found = False
try:
response = self.containerservice_client.container_services.get(self.resource_group, self.name)
found = True
self.log("Response : {0}".format(response))
self.log("ACS instance : {0} found".format(response.name))
except CloudError as e:
self.log('Did not find the ACS instance.')
if found is True:
return create_acs_dict(response)
else:
return False
def main():
"""Main execution"""
AzureRMContainerService()
if __name__ == '__main__':
main()
| e-gob/plataforma-kioscos-autoatencion | scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/cloud/azure/azure_rm_acs.py | Python | bsd-3-clause | 27,547 | 0.002868 |
import logging
import requests
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.mail import EmailMultiAlternatives
from django.template.loader import get_template
from django.utils import timezone
from invitations.models import Invitation
logger = logging.getLogger('email')
sentry = logging.getLogger('sentry')
def send_invite(message):
try:
invite = Invitation.objects.get(
id=message.get('id'),
status__in=[Invitation.PENDING, Invitation.ERROR],
)
except Invitation.DoesNotExist:
sentry.error("Invitation to send not found", exc_info=True, extra={'message': message})
return
invite.status = Invitation.PROCESSING
invite.save()
context = {
'invite': invite,
'domain': Site.objects.get_current().domain,
}
subject = "[ContactOtter] Invitation to join ContactOtter from %s" % (invite.sender)
if invite.book:
subject = "[ContactOtter] Invitation to share %s's contact book" % (invite.sender)
txt = get_template('email/invitation.txt').render(context)
html = get_template('email/invitation.html').render(context)
try:
message = EmailMultiAlternatives(
subject=subject,
body=txt,
from_email="ContactOtter <invites@contactotter.com>",
to=[invite.email,],
)
message.attach_alternative(html, "text/html")
message.send()
invite.status = Invitation.SENT
invite.sent = timezone.now()
invite.save()
    except Exception:
sentry.exception('Problem sending invite', exc_info=True, extra={'invite_id': invite.id})
invite.status = Invitation.ERROR
invite.save()
| phildini/logtacts | invitations/consumers.py | Python | mit | 1,739 | 0.004025 |
import unittest
import armv6instrdecode
import globals
import utils
import logging
import ARMCPU
import pdb
# if ConditionPassed(cond) then
# Rd = Rn + shifter_operand
# if S == 1 and Rd == R15 then
# if CurrentModeHasSPSR() then
# CPSR = SPSR
# else UNPREDICTABLE
# else if S == 1 then
# N Flag = Rd[31]
# Z Flag = if Rd == 0 then 1 else 0
# C Flag = CarryFrom(Rn + shifter_operand)
# V Flag = OverflowFrom(Rn + shifter_operand)
logfile = "TestBKPT.log"
with open(logfile, 'w'):
pass
logging.basicConfig(filename=logfile,level=logging.DEBUG)
class TestBKPT(unittest.TestCase):
"""Instructions"""
# preparing to test
def setUp(self):
""" Setting up for the test """
self.addr = 0
# ending the test
def tearDown(self):
"""Cleaning up after the test"""
# E1210070 700021E1 BKPT #4096
# 33222222222211111111110000000000
# 10987654321098765432109876543210
# 0b11100001001000010000000001110000 - BKPT
# 0b11100001001100010000000000000000 - TEQ
def testBKPT(self):
logging.debug("------------------------------------------")
logging.debug("TestDecode:testBKPT")
code = 0xE1210070 # BKPT #4096
instrStr = armv6instrdecode.getInstructionFromCode(self, code, 0)
logging.debug("1:" + instrStr)
self.assertEqual(instrStr, " E1210070 BKPT AL BKPT #4096", instrStr)
if __name__ == "__main__":
unittest.main() | jsymolon/ARMSim | TestBKPT.py | Python | gpl-2.0 | 1,495 | 0.002676 |
"""Tests for the Abode cover device."""
from unittest.mock import patch
from homeassistant.components.abode import ATTR_DEVICE_ID
from homeassistant.components.cover import DOMAIN as COVER_DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
SERVICE_CLOSE_COVER,
SERVICE_OPEN_COVER,
STATE_CLOSED,
)
from .common import setup_platform
DEVICE_ID = "cover.garage_door"
async def test_entity_registry(hass):
"""Tests that the devices are registered in the entity registry."""
await setup_platform(hass, COVER_DOMAIN)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
entry = entity_registry.async_get(DEVICE_ID)
assert entry.unique_id == "61cbz3b542d2o33ed2fz02721bda3324"
async def test_attributes(hass):
"""Test the cover attributes are correct."""
await setup_platform(hass, COVER_DOMAIN)
state = hass.states.get(DEVICE_ID)
assert state.state == STATE_CLOSED
assert state.attributes.get(ATTR_DEVICE_ID) == "ZW:00000007"
assert not state.attributes.get("battery_low")
assert not state.attributes.get("no_response")
assert state.attributes.get("device_type") == "Secure Barrier"
assert state.attributes.get(ATTR_FRIENDLY_NAME) == "Garage Door"
async def test_open(hass):
"""Test the cover can be opened."""
await setup_platform(hass, COVER_DOMAIN)
with patch("abodepy.AbodeCover.open_cover") as mock_open:
await hass.services.async_call(
COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: DEVICE_ID}, blocking=True
)
await hass.async_block_till_done()
mock_open.assert_called_once()
async def test_close(hass):
"""Test the cover can be closed."""
await setup_platform(hass, COVER_DOMAIN)
with patch("abodepy.AbodeCover.close_cover") as mock_close:
await hass.services.async_call(
COVER_DOMAIN,
SERVICE_CLOSE_COVER,
{ATTR_ENTITY_ID: DEVICE_ID},
blocking=True,
)
await hass.async_block_till_done()
mock_close.assert_called_once()
| partofthething/home-assistant | tests/components/abode/test_cover.py | Python | apache-2.0 | 2,112 | 0.000473 |
#!/usr/bin/env python3
import uuid
from setuptools import setup, find_packages
import pathlib
import pkg_resources
with pathlib.Path('requirements.txt').open() as requirements_txt:
reqs = [
str(requirement)
for requirement
in pkg_resources.parse_requirements(requirements_txt)
]
setup(
name='Meerkat API',
version='0.0.1',
long_description=__doc__,
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=reqs,
test_suite='meerkat_api.test'
)
| meerkat-code/meerkat_api | setup.py | Python | mit | 539 | 0 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2012, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.bibworkflow_engine import BibWorkflowEngine
from invenio.bibworkflow_object import BibWorkflowObject
from invenio.bibworkflow_model import Workflow, WfeObject
from invenio.bibworkflow_client import restart_workflow
from invenio.bibfield_jsonreader import JsonReader
from uuid import uuid1 as new_uuid
class DepositionWorkflow(object):
""" class for running webdeposit workflows using the BibWorkflow engine
The user_id and workflow must always be defined
If the workflow has been initialized before,
the appropriate uuid must be passed as a parameter.
Otherwise a new workflow will be created
The workflow functions must have the following structure:
def function_name(arg1, arg2):
def fun_name2(obj, eng):
# do stuff
return fun_name2
"""
def __init__(self, engine=None, workflow=[],
uuid=None, deposition_type=None, user_id=None):
self.obj = {}
self.set_user_id(user_id)
self.set_uuid(uuid)
self.deposition_type = deposition_type
self.current_step = 0
self.set_engine(engine)
self.set_workflow(workflow)
self.set_object()
def set_uuid(self, uuid=None):
""" Sets the uuid or obtains a new one """
if uuid is None:
uuid = new_uuid()
self.uuid = uuid
else:
self.uuid = uuid
def get_uuid(self):
return self.uuid
def set_engine(self, engine=None):
""" Initializes the BibWorkflow engine """
if engine is None:
engine = BibWorkflowEngine(name=self.get_deposition_type(),
uuid=self.get_uuid(),
user_id=self.get_user_id(),
module_name="webdeposit")
self.eng = engine
self.eng.save()
def set_workflow(self, workflow):
""" Sets the workflow """
self.eng.setWorkflow(workflow)
self.workflow = workflow
self.steps_num = len(workflow)
self.obj['steps_num'] = self.steps_num
def set_object(self):
self.db_workflow_obj = \
WfeObject.query.filter(WfeObject.workflow_id == self.get_uuid()). \
first()
if self.db_workflow_obj is None:
self.bib_obj = BibWorkflowObject(data=self.obj,
workflow_id=self.get_uuid(),
user_id=self.get_user_id())
else:
self.bib_obj = BibWorkflowObject(wfobject_id=self.db_workflow_obj.id,
workflow_id=self.get_uuid(),
user_id=self.get_user_id())
def get_object(self):
return self.bib_obj
def set_deposition_type(self, deposition_type=None):
if deposition_type is not None:
self.obj['deposition_type'] = deposition_type
def get_deposition_type(self):
return self.obj['deposition_type']
deposition_type = property(get_deposition_type, set_deposition_type)
def set_user_id(self, user_id=None):
if user_id is not None:
self.user_id = user_id
else:
from invenio.webuser_flask import current_user
self.user_id = current_user.get_id()
self.obj['user_id'] = self.user_id
def get_user_id(self):
return self.user_id
def get_status(self):
""" Returns the status of the workflow
(check CFG_WORKFLOW_STATUS from bibworkflow_engine)
"""
status = \
Workflow.query. \
filter(Workflow.uuid == self.get_uuid()).\
one().status
return status
def get_output(self, form_validation=None):
""" Returns a representation of the current state of the workflow
(a dict with the variables to fill the jinja template)
"""
user_id = self.user_id
uuid = self.get_uuid()
from invenio.webdeposit_utils import get_form, \
draft_field_get_all
form = get_form(user_id, uuid)
deposition_type = self.obj['deposition_type']
drafts = draft_field_get_all(user_id, deposition_type)
if form_validation:
form.validate()
# Get the template from configuration for this form
template = form.config.get_template() or 'webdeposit_add.html'
return dict(template_name_or_list=template,
workflow=self,
deposition_type=deposition_type,
form=form,
drafts=drafts,
uuid=uuid)
def run(self):
""" Runs or resumes the workflow """
finished = self.eng.db_obj.counter_finished > 1
if finished:
# The workflow is finished, nothing to do
return
wfobjects = \
WfeObject.query. \
filter(WfeObject.workflow_id == self.get_uuid())
wfobject = max(wfobjects.all(), key=lambda w: w.modified)
starting_point = wfobject.task_counter
restart_workflow(self.eng, [self.bib_obj],
starting_point, stop_on_halt=True)
def run_next_step(self):
if self.current_step >= self.steps_num:
self.obj['break'] = True
return
function = self.workflow[self.current_step]
function(self.obj, self)
self.current_step += 1
self.obj['step'] = self.current_step
def jump_forward(self):
restart_workflow(self.eng, [self.bib_obj], 'next', stop_on_halt=True)
def jump_backwards(self, dummy_synchronize=False):
if self.current_step > 1:
self.current_step -= 1
else:
self.current_step = 1
def get_workflow_from_db(self):
return Workflow.query.filter(Workflow.uuid == self.get_uuid()).first()
def cook_json(self):
user_id = self.obj['user_id']
uuid = self.get_uuid()
from invenio.webdeposit_utils import get_form
json_reader = JsonReader()
for step in range(self.steps_num):
try:
form = get_form(user_id, uuid, step)
json_reader = form.cook_json(json_reader)
except:
# some steps don't have any form ...
pass
return json_reader
def get_data(self, key):
if key in self.bib_obj.data:
return self.bib_obj.data[key]
else:
return None
| labordoc/labordoc-next | modules/webdeposit/lib/webdeposit_workflow.py | Python | gpl-2.0 | 7,407 | 0.00216 |
# -*- mode: python; coding: utf-8; -*-
VERSION = (1, 3, 3)
__version__ = '.'.join(map(str, VERSION))
__author__ = 'Joe Vasquez'
__email__ = 'joe.vasquez@gmail.com'
__license__ = 'MIT' | jobscry/vz-blog | __init__.py | Python | mit | 183 | 0.005464 |
from datetime import date
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.conf import settings
from django.utils.translation import ugettext as _
from django.utils.dateformat import format as format_date
from django.shortcuts import get_object_or_404
from django.http import Http404
from haystack.generic_views import SearchView
from .models import Post, Category
class _PostsListView(ListView):
"""
Base class for displaying post lists
"""
template_name = '{0}/blog_posts_list.html'.format(settings.CURRENT_SKIN)
context_object_name = 'posts'
paginate_by = settings.BLOG_POSTS_PAGINATE_BY
def get_queryset(self):
return super().get_queryset().prefetch_related('categories')
class _PageTitleMixIn:
"""
Adds page_title to ListView's context
"""
page_title = None
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['page_title'] = self.page_title
return context
class BlogHomeView(_PostsListView):
"""
Displays the list of all published posts starting from the recent.
Template: ``blog_posts_list.html``
Specific context variable: ``posts``
"""
queryset = Post.objects.published()
class BlogFeaturedPostsView(_PageTitleMixIn, _PostsListView):
"""
Displays the list of featured posts
Template: ``blog_posts_list.html``
Specific context variables:
- ``posts``
- ``page_title``
"""
queryset = Post.objects.featured()
page_title = _('Featured Posts')
class BlogCategoryView(_PageTitleMixIn, _PostsListView):
"""
Displays the list of posts in a given category
Template: ``blog_posts_list.html``
Specific context variables:
- ``posts``
- ``page_title``
"""
def get_queryset(self):
category = get_object_or_404(Category, slug=self.kwargs['slug'])
self.page_title = _('Posts in "{0}" category'.format(category.name))
return Post.objects.published().filter(categories__pk=category.pk)
class BlogCategoriesListView(_PageTitleMixIn, ListView):
"""
Displays the list of categories that have posts in them
Template: ``blog_categories_list.html``
Specific context variables:
- ``categories``
- ``page_title``
"""
template_name = '{0}/blog_categories_list.html'.format(settings.CURRENT_SKIN)
queryset = Category.objects.non_empty()
page_title = _('Categories')
context_object_name = 'categories'
class BlogPostView(DetailView):
"""
Displays a blog post page
Template: ``blog_post.html``
Specific context variable: ``post``
"""
template_name = '{0}/blog_post.html'.format(settings.CURRENT_SKIN)
model = Post
context_object_name = 'post'
query_pk_and_slug = True
def dispatch(self, request, *args, **kwargs):
self.request = request
return super().dispatch(request, *args, **kwargs)
def get_object(self, queryset=None):
"""
Prevent non-authenticated users from viewing unpublished posts
"""
post = super().get_object(queryset)
if not(post.is_published or self.request.user.is_authenticated):
raise Http404
return post
class BlogArchiveView(_PageTitleMixIn, ListView):
"""
Displays the blog archive by years and months
Template: ``blog_archive.html``
Specific context variables:
- ``months`` -- the list of class:`datetime.data` objects representing months
- ``page_title``
"""
template_name = '{0}/blog_archive.html'.format(settings.CURRENT_SKIN)
queryset = Post.objects.published().dates('date_published', 'month', order='DESC')
context_object_name = 'months'
page_title = _('Blog Archive')
class BlogMonthArchiveView(_PageTitleMixIn, _PostsListView):
"""
Displays the list of posts by year and month
Template: ``blog_posts_list.html``
Specific context variables:
- ``posts``
- ``page_title``
"""
def get_queryset(self):
year = int(self.kwargs['year'])
month = int(self.kwargs['month'])
self.page_title = _('Blog Archive, {0}').format(format_date(date(year=year, month=month, day=1), 'F Y'))
return Post.objects.published().filter(date_published__year=year, date_published__month=month)
class BlogPostSearchView(SearchView):
"""
Displays the search page
Template: ``blog_search.html``
Specific context variables: none.
"""
template_name = '{0}/blog_search.html'.format(settings.CURRENT_SKIN)
paginate_by = 10
def get_queryset(self):
return super().get_queryset().highlight()
| romanvm/romans_blog | blog/views.py | Python | gpl-3.0 | 4,713 | 0.001061 |
# Copyright (C) 2012 University of Southern California
# This software is licensed under the GPLv3 license, included in
# ./GPLv3-LICENSE.txt in the source distribution
from collections import defaultdict
import itertools
import logging
import os
import shlex
import sys
from magi.util import helpers
from magi.util.execl import execAndRead, pipeIn
from base import Testbed, IFObj
log = logging.getLogger(__name__)
class EmulabTestbed(Testbed):
def __init__(self):
Testbed.__init__(self)
self._store = {}
""" Testbed Properties (readonly) """
@property
def experiment(self):
""" the experiment name """
return self.getExperiment()
@property
def project(self):
""" the project name """
return self.getProject()
@property
def eid(self):
""" the experiment 'id' string """
return self.getExperimentID()
def getExperiment(self):
if 'experiment' not in self._store:
self.loadEID()
return self._store['experiment']
def getProject(self):
if 'project' not in self._store:
self.loadEID()
return self._store['project']
def getExperimentID(self):
if 'eid' not in self._store:
self.loadEID()
return self._store['eid']
def getExperimentDir(self):
return os.path.join('/proj', self.getProject(), 'exp', self.getExperiment())
def toControlPlaneNodeName(self, nodename):
if nodename not in ['localhost', '127.0.0.1'] and '.' not in nodename:
nodename += '.%s.%s' % (self.getExperiment(), self.getProject())
return nodename
""" Queries for this Node """
def getLocalVirtualNodes(self):
""" Get all the virtual nodes hosted by this machine """
ret = list()
for l in self.readAllLines(pipeIn('/usr/local/etc/emulab/tmcc vnodelist')):
try:
ret.append(self.parseVarLine(l)['VNODEID'])
except:
pass
return ret
def parseVarLine(self, line):
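        # Illustrative (assumed) input, based on the shlex parsing below:
        #   'ROLE=node INET=10.1.1.2 MASK=255.255.255.0'
        # yields {'ROLE': 'node', 'INET': '10.1.1.2', 'MASK': '255.255.255.0'};
        # a bare token such as 'ISUP' becomes {'ISUP': '1'}.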
args = {}
for x in shlex.split(line):
sp = x.split('=')
if sp[0] == '':
continue
if (len(sp) == 1):
args[sp[0]] = '1'
else:
args[sp[0]] = sp[1]
return args
def amAVirtualNode(self):
""" return true if I am a virtual node (i.e. not a physical node or virtual host) """
return len(execAndRead(["/usr/local/etc/emulab/tmcc", "jailconfig"])[0]) > 0
""" Functions that actually load the data into our _store """
def loadEID(self):
""" Load the nickname file to get the node, experiment and project names """
try:
self._store.update(node='?', experiment='?', project='?', eid='?')
nickname = self.getNicknameData()
p = nickname.split('.')
self._store.update(node=p[0], experiment=p[1], project=p[2],
eid=p[2]+"/"+p[1])
except:
log.exception("Can't load my host info")
def setEID(self, node=None, experiment=None, project=None):
""" Set the node, experiment, and project name """
if node:
self._store.update(node=node)
if experiment:
self._store.update(experiment=experiment)
if project:
self._store.update(project=project)
self._store.update(eid=self.project+"/"+self.experiment)
def loadControlInfo(self):
""" Load the control IP address and IF name files """
try:
self._store.update(controlip='?', controlif='?')
nickname = self.getNicknameData()
self._store['controlip'] = self.getHostForName(nickname)
self._store['controlif'] = self.getControlIfData()
except:
log.exception("Can't load control interface info")
def loadIfConfig(self):
""" Load all of the interface info from emulab/boot/tmcc/ifconfig """
try:
iflist = []
# Split into lines, and parse the K=V pieces
for line in self.getIfconfigData():
args = self.parseVarLine(line)
inet = args.get('INET', '')
mask = args.get('MASK', '')
# virtual nodes have no MAC, instead they have a VMAC
mac = args.get('MAC', args.get('VMAC',''))
name = self.getIfFor(inet, mac)
if inet == '' or mac == '': continue
iflist.append(IFObj(inet, name, mac, mask))
self._store['iflist'] = iflist
except:
log.exception("Can't load interface config data")
def loadTopoGraph(self):
try:
import networkx as nx
nodelist = False
linkToNodeList = defaultdict(set)
graph = nx.Graph()
for e in self.getTopomap():
if not nodelist:
if "# nodes" in e:
nodelist = True
continue
if "# lans" in e:
break
node = e.split(",")[0]
links = e.split(",")[1].split()
linksInfo = dict()
for link in links:
linkName = link.split(":")[0]
ip = link.split(":")[1]
linkToNodeList[linkName].add(node)
linksInfo[linkName] = {'name':linkName, 'ip':ip}
graph.add_node(node, links=linksInfo)
for linkName in linkToNodeList.keys():
nodeSet = linkToNodeList[linkName]
for node in nodeSet:
graph.node[node]['links'][linkName]['peerNodes'] = list(nodeSet - set([node]))
graph.add_edges_from(list(itertools.combinations(nodeSet, 2)), linkName=linkName)
self._store['topograph'] = graph
except:
log.exception("Can't load topology graph")
""" Abstracted 'readers' of data from 'locations' """
def getSwapperData(self): return self.readFirstLine(pipeIn('/usr/local/etc/emulab/tmcc creator'))
def getNicknameData(self): return self.readFirstLine(open('/var/emulab/boot/nickname', 'r'))
def getControlIfData(self): return self.readFirstLine(open('/var/emulab/boot/controlif', 'r'))
def getIfconfigData(self): return self.readAllLines(pipeIn('/usr/local/etc/emulab/tmcc ifconfig'))
def getTopomap(self): return self.readAllLines(open('/var/emulab/boot/topomap'))
def getIfFor(self, inet, mac):
if (sys.platform == 'cygwin'):
return execAndRead("ip2pcapif %s" % (inet))[0].strip()
else:
return execAndRead("/usr/local/etc/emulab/findif %s" % (mac))[0].strip()
def getMulticastAddress(self):
return helpers.getMulticast(self.project, self.experiment, 0)
# Small test if running this file directly
if __name__ == "__main__":
logging.basicConfig()
x = EmulabTestbed()
print 'Node Name:', x.nodename
print 'FQDN:', x.fqdn
print 'Control IP:', x.controlip
print 'Control IF:', x.controlif
print 'Server Node:', x.getServer()
iplist = x.getLocalIPList()
print 'Exp. Addresses: %s' % iplist
print 'Exp. Interface info:'
for ip in iplist:
print '\t%s: %s' % (ip, x.getInterfaceInfo(ip))
| deter-project/magi | magi/testbed/emulab.py | Python | gpl-2.0 | 7,564 | 0.006478 |
"""
Small Modification of src/examples/Geometry/geometry_demos.py
"""
from OCC.gp import *
from OCC.Geom import *
from OCC.TColGeom import *
from OCC.TColgp import *
from OCC.GeomConvert import *
from OCC.BRepBuilderAPI import *
from OCC.TopoDS import *
from OCC.STEPControl import *
from OCC.Display.SimpleGui import init_display
display, start_display, add_menu, add_function_to_menu = init_display()
def bezier_surfaces(event=None):
display.EraseAll()
array1 = TColgp_Array2OfPnt(1, 3, 1, 3)
array2 = TColgp_Array2OfPnt(1, 3, 1, 3)
array3 = TColgp_Array2OfPnt(1, 3, 1, 3)
array4 = TColgp_Array2OfPnt(1, 3, 1, 3)
array1.SetValue(1, 1, gp_Pnt(1, 1, 1))
array1.SetValue(1, 2, gp_Pnt(2, 1, 2))
array1.SetValue(1, 3, gp_Pnt(3, 1, 1))
array1.SetValue(2, 1, gp_Pnt(1, 2, 1))
array1.SetValue(2, 2, gp_Pnt(2, 2, 2))
array1.SetValue(2, 3, gp_Pnt(3, 2, 0))
array1.SetValue(3, 1, gp_Pnt(1, 3, 2))
array1.SetValue(3, 2, gp_Pnt(2, 3, 1))
array1.SetValue(3, 3, gp_Pnt(3, 3, 0))
array2.SetValue(1, 1, gp_Pnt(3, 1, 1))
array2.SetValue(1, 2, gp_Pnt(4, 1, 1))
array2.SetValue(1, 3, gp_Pnt(5, 1, 2))
array2.SetValue(2, 1, gp_Pnt(3, 2, 0))
array2.SetValue(2, 2, gp_Pnt(4, 2, 1))
array2.SetValue(2, 3, gp_Pnt(5, 2, 2))
array2.SetValue(3, 1, gp_Pnt(3, 3, 0))
array2.SetValue(3, 2, gp_Pnt(4, 3, 0))
array2.SetValue(3, 3, gp_Pnt(5, 3, 1))
array3.SetValue(1, 1, gp_Pnt(1, 3, 2))
array3.SetValue(1, 2, gp_Pnt(2, 3, 1))
array3.SetValue(1, 3, gp_Pnt(3, 3, 0))
array3.SetValue(2, 1, gp_Pnt(1, 4, 1))
array3.SetValue(2, 2, gp_Pnt(2, 4, 0))
array3.SetValue(2, 3, gp_Pnt(3, 4, 1))
array3.SetValue(3, 1, gp_Pnt(1, 5, 1))
array3.SetValue(3, 2, gp_Pnt(2, 5, 1))
array3.SetValue(3, 3, gp_Pnt(3, 5, 2))
array4.SetValue(1, 1, gp_Pnt(3, 3, 0))
array4.SetValue(1, 2, gp_Pnt(4, 3, 0))
array4.SetValue(1, 3, gp_Pnt(5, 3, 1))
array4.SetValue(2, 1, gp_Pnt(3, 4, 1))
array4.SetValue(2, 2, gp_Pnt(4, 4, 1))
array4.SetValue(2, 3, gp_Pnt(5, 4, 1))
array4.SetValue(3, 1, gp_Pnt(3, 5, 2))
array4.SetValue(3, 2, gp_Pnt(4, 5, 2))
array4.SetValue(3, 3, gp_Pnt(5, 5, 1))
BZ1 = Geom_BezierSurface(array1)
BZ2 = Geom_BezierSurface(array2)
BZ3 = Geom_BezierSurface(array3)
BZ4 = Geom_BezierSurface(array4)
bezierarray = TColGeom_Array2OfBezierSurface(1, 2, 1, 2)
bezierarray.SetValue(1, 1, BZ1.GetHandle())
bezierarray.SetValue(1, 2, BZ2.GetHandle())
bezierarray.SetValue(2, 1, BZ3.GetHandle())
bezierarray.SetValue(2, 2, BZ4.GetHandle())
BB = GeomConvert_CompBezierSurfacesToBSplineSurface(bezierarray)
if BB.IsDone():
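        # The converter exposes the merged control net; the poles, knots,
        # multiplicities and degrees below are used to rebuild a single
        # Geom_BSplineSurface from the four Bezier patches.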
poles = BB.Poles().GetObject().Array2()
uknots = BB.UKnots().GetObject().Array1()
vknots = BB.VKnots().GetObject().Array1()
umult = BB.UMultiplicities().GetObject().Array1()
vmult = BB.VMultiplicities().GetObject().Array1()
udeg = BB.UDegree()
vdeg = BB.VDegree()
BSPLSURF = Geom_BSplineSurface( poles, uknots, vknots, umult, vmult, udeg, vdeg, 0, 0 )
BSPLSURF.Translate(gp_Vec(0,0,2))
display.DisplayShape(BSPLSURF.GetHandle(), update=True)
start_display()
if __name__ == '__main__':
bezier_surfaces() | GeoMop/PythonOCC_Examples | src/bspline_surface.py | Python | gpl-2.0 | 3,394 | 0.003536 |
"""
Nonlinear cartoon+texture decomposition ipol demo web app
"""
from lib import base_app, build, http, image
from lib.misc import ctime
from lib.misc import prod
from lib.base_app import init_app
import shutil
import cherrypy
from cherrypy import TimeoutError
import os.path
import time
from math import ceil
class app(base_app):
""" nonlinear cartoon+texture decomposition """
title = "Cartoon+Texture Image Decomposition"
xlink_article = 'http://www.ipol.im/pub/art/2011/blmv_ct/'
input_nb = 1
input_max_pixels = 700 * 700 # max size (in pixels) of an input image
input_max_weight = 10 * 1024 * 1024 # max size (in bytes) of an input file
input_dtype = '3x8i' # input image expected data type
input_ext = '.png' # input image expected extension (ie file format)
is_test = False
def __init__(self):
"""
app setup
"""
# setup the parent class
base_dir = os.path.dirname(os.path.abspath(__file__))
base_app.__init__(self, base_dir)
# select the base_app steps to expose
# index() and input_xxx() are generic
base_app.index.im_func.exposed = True
base_app.input_select.im_func.exposed = True
base_app.input_upload.im_func.exposed = True
# params() is modified from the template
base_app.params.im_func.exposed = True
# result() is modified from the template
base_app.result.im_func.exposed = True
def build(self):
"""
program build/update
"""
# store common file path in variables
tgz_url = "http://www.ipol.im/pub/art/2011/blmv_ct/srcB.tar.gz"
tgz_file = self.dl_dir + "srcB.tar.gz"
progs = ["cartoonIpol"]
src_bin = dict([(self.src_dir + os.path.join("srcB", prog),
self.bin_dir + prog)
for prog in progs])
log_file = self.base_dir + "build.log"
# get the latest source archive
build.download(tgz_url, tgz_file)
# test if any dest file is missing, or too old
if all([(os.path.isfile(bin_file)
and ctime(tgz_file) < ctime(bin_file))
for bin_file in src_bin.values()]):
cherrypy.log("not rebuild needed",
context='BUILD', traceback=False)
else:
# extract the archive
build.extract(tgz_file, self.src_dir)
# build the programs
build.run("make -j4 -C %s %s"
% (self.src_dir + "srcB", " ".join(progs)),
stdout=log_file)
# save into bin dir
if os.path.isdir(self.bin_dir):
shutil.rmtree(self.bin_dir)
os.mkdir(self.bin_dir)
for (src, dst) in src_bin.items():
shutil.copy(src, dst)
# cleanup the source dir
shutil.rmtree(self.src_dir)
return
#
# PARAMETER HANDLING
#
def select_subimage(self, x0, y0, x1, y1):
"""
cut subimage from original image
"""
# draw selected rectangle on the image
imgS = image(self.work_dir + 'input_0.png')
imgS.draw_line([(x0, y0), (x1, y0), (x1, y1), (x0, y1), (x0, y0)],
color="red")
imgS.draw_line([(x0+1, y0+1), (x1-1, y0+1), (x1-1, y1-1), (x0+1, y1-1),
(x0+1, y0+1)], color="white")
imgS.save(self.work_dir + 'input_0s.png')
# crop the image
# try cropping from the original input image (if different from input_0)
im0 = image(self.work_dir + 'input_0.orig.png')
dx0 = im0.size[0]
img = image(self.work_dir + 'input_0.png')
dx = img.size[0]
if (dx != dx0) :
z = float(dx0)/float(dx)
im0.crop((int(x0*z), int(y0*z), int(x1*z), int(y1*z)))
# resize if cropped image is too big
if self.input_max_pixels and prod(im0.size) > self.input_max_pixels:
im0.resize(self.input_max_pixels, method="antialias")
img = im0
else :
img.crop((x0, y0, x1, y1))
# save result
img.save(self.work_dir + 'input_0.sel.png')
return
@cherrypy.expose
@init_app
def params(self, newrun=False, msg=None, x0=None, y0=None,
x1=None, y1=None, scale="3.0"):
"""
configure the algo execution
"""
if newrun:
self.clone_input()
if x0:
self.select_subimage(int(x0), int(y0), int(x1), int(y1))
return self.tmpl_out("params.html", msg=msg, x0=x0, y0=y0,
x1=x1, y1=y1, scale=scale)
@cherrypy.expose
@init_app
def rectangle(self, action=None, scale=None,
x=None, y=None, x0=None, y0=None):
"""
select a rectangle in the image
"""
if action == 'run':
if x == None:
#save parameter
try:
self.cfg['param'] = {'scale' : scale}
except ValueError:
return self.error(errcode='badparams',
errmsg="Incorrect scale parameter.")
else:
#save parameters
try:
self.cfg['param'] = {'scale' : scale,
'x0' : int(x0),
'y0' : int(y0),
'x1' : int(x),
'y1' : int(y)}
except ValueError:
return self.error(errcode='badparams',
errmsg="Incorrect parameters.")
# use the whole image if no subimage is available
try:
img = image(self.work_dir + 'input_0.sel.png')
except IOError:
img = image(self.work_dir + 'input_0.png')
img.save(self.work_dir + 'input_0.sel.png')
# go to the wait page, with the key
http.redir_303(self.base_url + "wait?key=%s" % self.key)
return
else:
# use a part of the image
if x0 == None:
# first corner selection
x = int(x)
y = int(y)
# draw a cross at the first corner
img = image(self.work_dir + 'input_0.png')
img.draw_cross((x, y), size=4, color="white")
img.draw_cross((x, y), size=2, color="red")
img.save(self.work_dir + 'input.png')
return self.tmpl_out("params.html", scale=scale, x0=x, y0=y)
else:
# second corner selection
x0 = int(x0)
y0 = int(y0)
x1 = int(x)
y1 = int(y)
# reorder the corners
(x0, x1) = (min(x0, x1), max(x0, x1))
(y0, y1) = (min(y0, y1), max(y0, y1))
assert (x1 - x0) > 0
assert (y1 - y0) > 0
#save parameters
try:
self.cfg['param'] = {'scale' : scale,
'x0' : x0,
'y0' : y0,
'x1' : x1,
'y1' : y1}
except ValueError:
return self.error(errcode='badparams',
errmsg="Incorrect parameters.")
#select subimage
self.select_subimage(x0, y0, x1, y1)
# go to the wait page, with the key
http.redir_303(self.base_url + "wait?key=%s" % self.key)
return
@cherrypy.expose
@init_app
def wait(self):
"""
run redirection
"""
http.refresh(self.base_url + 'run?key=%s' % self.key)
return self.tmpl_out("wait.html")
@cherrypy.expose
@init_app
def run(self):
"""
algorithm execution
"""
# read the parameters
scale = self.cfg['param']['scale']
# run the algorithm
stdout = open(self.work_dir + 'stdout.txt', 'w')
try:
run_time = time.time()
self.run_algo(scale, stdout=stdout)
self.cfg['info']['run_time'] = time.time() - run_time
except TimeoutError:
return self.error(errcode='timeout')
except RuntimeError:
return self.error(errcode='runtime')
stdout.close()
http.redir_303(self.base_url + 'result?key=%s' % self.key)
# archive
if self.cfg['meta']['original']:
ar = self.make_archive()
ar.add_file("input_0.orig.png", info="uploaded image")
# save processed image (if different from uploaded)
im0 = image(self.work_dir + 'input_0.orig.png')
dx0 = im0.size[0]
img = image(self.work_dir + 'input_0.png')
dx = img.size[0]
imgsel = image(self.work_dir + 'input_0.sel.png')
dxsel = imgsel.size[0]
if (dx != dx0) or (dxsel != dx):
ar.add_file("input_0.sel.png", info="original input image")
ar.add_file("cartoon.png", info="cartoon image")
ar.add_file("texture.png", info="texture image")
ar.add_info({"scale": scale})
ar.save()
return self.tmpl_out("run.html")
def run_algo(self, scale, stdout=None, timeout=False):
"""
the core algo runner
could also be called by a batch processor
        the only parameter is the processing scale
"""
#cartoon-texture images
p = self.run_proc(['cartoonIpol', 'input_0.sel.png', str(scale),
'cartoon.png', 'texture.png'],
stdout=None, stderr=None)
self.wait_proc(p, timeout)
@cherrypy.expose
@init_app
def result(self):
"""
display the algo results
"""
# read the parameters
scale = self.cfg['param']['scale']
try:
x0 = self.cfg['param']['x0']
except KeyError:
x0 = None
try:
y0 = self.cfg['param']['y0']
except KeyError:
y0 = None
try:
x1 = self.cfg['param']['x1']
except KeyError:
x1 = None
try:
y1 = self.cfg['param']['y1']
except KeyError:
y1 = None
(sizeX, sizeY)=image(self.work_dir + 'input_0.sel.png').size
# Resize for visualization (new size of the smallest dimension = 200)
zoom_factor = None
if (sizeX < 200) or (sizeY < 200):
if sizeX > sizeY:
zoom_factor = int(ceil(200.0/sizeY))
else:
zoom_factor = int(ceil(200.0/sizeX))
sizeX = sizeX*zoom_factor
sizeY = sizeY*zoom_factor
im = image(self.work_dir + 'input_0.sel.png')
im.resize((sizeX, sizeY), method="pixeldup")
im.save(self.work_dir + 'input_0_zoom.sel.png')
im = image(self.work_dir + 'cartoon.png')
im.resize((sizeX, sizeY), method="pixeldup")
im.save(self.work_dir + 'cartoon_zoom.png')
im = image(self.work_dir + 'texture.png')
im.resize((sizeX, sizeY), method="pixeldup")
im.save(self.work_dir + 'texture_zoom.png')
return self.tmpl_out("result.html", scale=scale,
x0=x0, y0=y0, x1=x1, y1=y1,
sizeY=sizeY, zoom_factor=zoom_factor)
| juan-cardelino/matlab_demos | ipol_demo-light-1025b85/app_available/blmv_nonlinear_cartoon_texture_decomposition/app.py | Python | gpl-2.0 | 11,626 | 0.006795 |
#pylint: disable=R0901,R0902,R0904
from __future__ import (absolute_import, division, print_function)
from six.moves import range
import numpy as np
import os
from PyQt4.QtGui import QSizePolicy
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
from mpl_toolkits.mplot3d import Axes3D
class MplPlot3dCanvas(FigureCanvas):
"""
Matplotlib 3D canvas class
"""
def __init__(self, parent=None):
"""
Initialization
:return:
"""
#
self._myParentWindow = parent
# Initialize the figure
self._myFigure = Figure()
# Init canvas
FigureCanvas.__init__(self, self._myFigure)
FigureCanvas.setSizePolicy(self, QSizePolicy.Expanding, QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
# Axes
self._myAxes = Axes3D(self._myFigure) # Canvas figure must be created for mouse rotation
self.format_coord_org = self._myAxes.format_coord
self._myAxes.format_coord = self.report_pixel
# color
self._colorMap = [0.5, 0.5, 0.5]
# Others
self._dataKey = 0
self._dataDict = dict()
# List of plots on canvas NOW
self._currPlotList = list()
self._currSurfaceList = list() # [{"xx":,"yy:","val:"}]
return
def clear_3d_plots(self):
"""
Clear all the figures from canvas
:return:
"""
for plt in self._currPlotList:
# del plt
self._myAxes.collections.remove(plt)
self._currPlotList = []
return
def get_data(self, data_key):
""" Get data by data key
:param data_key:
:return:
"""
assert data_key in self._dataDict, 'Data key %s does not exist in %s.' % (str(data_key),
str(self._dataDict.keys()))
return self._dataDict[data_key]
def import_3d_data(self, points, intensities):
"""
:param points:
:param intensities:
:return:
"""
# check
assert isinstance(points, np.ndarray) and points.shape[1] == 3, 'Shape is %s.' % str(points.shape)
assert isinstance(intensities, np.ndarray) and len(points) == len(intensities)
# set
self._dataDict[self._dataKey] = (points, intensities)
# update
r_value = self._dataKey
self._dataKey += 1
return r_value
def import_data_from_file(self, file_name):
""" File will have more than 4 columns, as X, Y, Z, Intensity, ...
:param file_name:
:return:
"""
# check
assert isinstance(file_name, str) and os.path.exists(file_name)
# parse
data_file = open(file_name, 'r')
raw_lines = data_file.readlines()
data_file.close()
# construct ND data array
xyz_points = np.zeros((len(raw_lines), 3))
intensities = np.zeros((len(raw_lines), ))
# parse
for i in range(len(raw_lines)):
line = raw_lines[i].strip()
# skip empty line
if len(line) == 0:
continue
# set value
terms = line.split(',')
for j in range(3):
xyz_points[i][j] = float(terms[j])
intensities[i] = float(terms[3])
# END-FOR
# Add to data structure for managing
self._dataDict[self._dataKey] = (xyz_points, intensities)
return_value = self._dataKey
self._dataKey += 1
return return_value
def plot_scatter(self, points, color_list):
"""
Plot points with colors in scatter mode
:param points:
:param color_list:
:return:
"""
# check: [TO DO] need MORE!
assert isinstance(points, np.ndarray)
assert len(points) == len(color_list)
assert points.shape[1] == 3, '3D data %s.' % str(points.shape)
#
# plot scatters
plt = self._myAxes.scatter(points[:, 0], points[:, 1], points[:, 2],
zdir='z', c=color_list)
self._currPlotList.append(plt)
self.draw()
return
def plot_scatter_auto(self, data_key, base_color=None):
"""
Plot data in scatter plot in an automatic mode
:param data_key: key to locate the data stored to this class
:param base_color: None or a list of 3 elements from 0 to 1 for RGB
:return:
"""
# Check
assert isinstance(data_key, int) and data_key >= 0
assert base_color is None or len(base_color) == 3
# get data and check
points = self._dataDict[data_key][0]
intensities = self._dataDict[data_key][1]
assert isinstance(points, np.ndarray)
assert isinstance(points.shape, tuple)
assert points.shape[1] == 3, '3D data %s.' % str(points.shape)
if len(points) > 1:
# set x, y and z limit
x_min = min(points[:, 0])
x_max = max(points[:, 0])
d_x = x_max - x_min
# print(x_min, x_max)
y_min = min(points[:, 1])
y_max = max(points[:, 1])
d_y = y_max - y_min
# print(y_min, y_max)
z_min = min(points[:, 2])
z_max = max(points[:, 2])
d_z = z_max - z_min
print(z_min, z_max)
# use default setup
self._myAxes.set_xlim(x_min-d_x, x_max+d_x)
self._myAxes.set_ylim(y_min-d_y, y_max+d_y)
self._myAxes.set_zlim(z_min-d_z, z_max+d_z)
# END-IF
# color map for intensity
color_list = list()
if base_color is None:
color_r = self._colorMap[0]
color_g = self._colorMap[1]
else:
color_r = base_color[0]
color_g = base_color[1]
if len(intensities) > 1:
min_intensity = min(intensities)
max_intensity = max(intensities)
diff = max_intensity - min_intensity
b_list = intensities - min_intensity
b_list = b_list/diff
num_points = len(points[:, 2])
for index in range(num_points):
color_tup = (color_r, color_g, b_list[index])
color_list.append(color_tup)
else:
color_list.append((color_r, color_g, 0.5))
# plot scatters
self._myAxes.scatter(points[:, 0], points[:, 1], points[:, 2], zdir='z', c=color_list)
self.draw()
def plot_surface(self):
"""
Plot surface
:return:
"""
print('Number of surf = ', len(self._currSurfaceList))
for surf in self._currSurfaceList:
plt = self._myAxes.plot_surface(surf["xx"], surf["yy"], surf["val"],
rstride=5, cstride=5, # color map??? cmap=cm.jet,
linewidth=1, antialiased=True)
self._currPlotList.append(plt)
# END-FOR
return
def report_pixel(self, x_d, y_d):
report = self.format_coord_org(x_d, y_d)
report = report.replace(",", " ")
return report
def set_axes_labels(self, x_label, y_label, z_label):
"""
:return:
"""
if x_label is not None:
self._myAxes.set_xlabel(x_label)
if y_label is not None:
self._myAxes.set_ylabel(y_label)
if z_label is not None:
self._myAxes.set_zlabel(z_label)
return
def set_color_map(self, color_r, color_g, color_b):
"""
Set the base line of color map
:param color_r:
:param color_g:
:param color_b:
:return:
"""
# Set color map
        assert isinstance(color_r, float) and 0. <= color_r < 1.
        assert isinstance(color_g, float) and 0. <= color_g < 1.
        assert isinstance(color_b, float) and 0. <= color_b < 1.
self._colorMap = [color_r, color_g, color_b]
def set_title(self, title, font_size):
"""
Set super title
:param title:
:return:
"""
self._myFigure.suptitle(title, fontsize=font_size)
return
def set_xyz_limits(self, points, limits=None):
""" Set XYZ axes limits
:param points:
:param limits: if None, then use default; otherwise, 3-tuple of 2-tuple
:return:
"""
# check
assert isinstance(points, np.ndarray)
# get limit
if limits is None:
limits = get_auto_xyz_limit(points)
# set limit to axes
self._myAxes.set_xlim(limits[0][0], limits[0][1])
self._myAxes.set_ylim(limits[1][0], limits[1][1])
self._myAxes.set_zlim(limits[2][0], limits[2][1])
return
def get_auto_xyz_limit(points):
""" Get default limit on X, Y, Z
Requirements: number of data points must be larger than 0.
:param points:
:return: 3-tuple of 2-tuple as (min, max) for X, Y and Z respectively
"""
# check
assert isinstance(points, np.ndarray)
dim = points.shape[1]
assert dim == 3
# set x, y and z limit
x_min = min(points[:, 0])
x_max = max(points[:, 0])
d_x = x_max - x_min
# print(x_min, x_max)
y_min = min(points[:, 1])
y_max = max(points[:, 1])
d_y = y_max - y_min
# print(y_min, y_max)
z_min = min(points[:, 2])
z_max = max(points[:, 2])
d_z = z_max - z_min
print(z_min, z_max)
# use default setup
x_lim = (x_min-d_x, x_max+d_x)
y_lim = (y_min-d_y, y_max+d_y)
z_lim = (z_min-d_z, z_max+d_z)
return x_lim, y_lim, z_lim
| wdzhou/mantid | scripts/HFIR_4Circle_Reduction/mplgraphicsview3d.py | Python | gpl-3.0 | 9,817 | 0.001121 |
#-------------------------------------------------------------------------------
# PROJECT: VHDL Code Generator
# NAME: Dynamic AND Gate
#
# LICENSE: GNU-GPL V3
#-------------------------------------------------------------------------------
__isBlock__ = True
__className__ = "ANDGate"
__win__ = "ANDGateWindow"
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4 import uic
from lib.Block import *
class ANDGate(Block):
""" AND Gate
PORTS SPECIFICATIONS
"""
# TODO: Specifications of AND Gate (Documentation)
def __init__(self,system,numInput,sizeInput):
"""
        :param system:
        :param numInput: Number of inputs
        :param sizeInput: Size of each input
"""
self.numInput = numInput
self.name = "AND_GATE"
self.sizeInput = sizeInput
input_vector = [sizeInput]*self.numInput
output_vector = [sizeInput]
super().__init__(input_vector,output_vector,system,self.name)
def generate(self):
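        # Sketch of the emitted assignment, inferred from the string formatting
        # below (an assumption, not taken from project docs): for single-bit
        # inputs it is roughly "OUT <= IN_A and IN_B", and for multi-bit inputs
        # each bit position is and-ed and the per-bit expressions are joined
        # with '&' and terminated by ';'.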
filetext = ""
if self.getOutputSignalSize(0) == 1:
filetext += "%s <= %s"%(self.getOutputSignalName(0),self.getInputSignalName(0))
for i in range(1,self.numInput):
filetext += " and %s"%(self.getInputSignalName(i))
else:
filetext += "%s <= "%self.getOutputSignalName(0)
            for i in range(self.sizeInput):
filetext += "%s[%d]"%(self.getInputSignalName(0),self.sizeInput-i-1)
for j in range(1,self.numInput):
filetext += " and %s[%d]"%(self.getInputSignalName(j),self.sizeInput-i-1)
if i != self.sizeInput - 1:
filetext += " & "
filetext += ";\n"
return filetext
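# Illustration (signal names are assumed, the real names come from the parent
# Block/system): for a 2-input, 1-bit gate resolving to in0, in1 and out0,
# generate() returns "out0 <= in0 and in1;\n"; for 2-bit inputs it returns the
# bitwise concatenation "out0 <= in0[1] and in1[1] & in0[0] and in1[0];\n".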
class ANDGateWindow(QWidget):
accept = pyqtSignal(list)
def __init__(self,parent = None):
super().__init__()
self.ui = uic.loadUi("blocks\\Standard Library\\Gate.ui",self)
self.ui.acceptButton.clicked.connect(self.accepted)
self.ui.setWindowTitle("AND GATE")
def accepted(self):
numInput = self.ui.numInput.value()
sizeInput = self.ui.sizeInput.value()
self.accept.emit([numInput,sizeInput])
self.close()
| BlakeTeam/VHDLCodeGenerator | blocks/Standard Library/Gate AND.py | Python | gpl-3.0 | 2,291 | 0.014841 |
# Raymond Hettinger's
# http://code.activestate.com/recipes/576647/
from itertools import permutations
def queen_gf(n):
cols = range(n)
for ans in permutations(cols):
if (n == len(set(ans[i]+i for i in cols)) ==
len(set(ans[i]-i for i in cols))):
yield ans
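# Example usage (not part of the original recipe): print the first solution for
# an 8x8 board; ans[i] is the column of the queen placed in row i.
if __name__ == '__main__':
    print(next(queen_gf(8)))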
| yehnan/python_book_yehnan | ch06/ch06_8queen_hettingers_gf.py | Python | gpl-2.0 | 315 | 0.003175 |
../../../../../share/pyshared/twisted/test/test_usage.py | Alberto-Beralix/Beralix | i386-squashfs-root/usr/lib/python2.7/dist-packages/twisted/test/test_usage.py | Python | gpl-3.0 | 56 | 0.017857 |
# Copyright 2012, 2013 Arndt Droullier, Nive GmbH. All rights reserved.
# Released under GPL3. See license.txt
#
__doc__ = """
Image
-----
The image element inserts images into the web page.
Images uploaded as fullsize will be linked as pop-ups.
If the Python Imaging Library (PIL) is installed, automated image conversion on upload can be
activated by adding `nive_cms.extensions.images.ImageProcessor` to configuration.extensions.
::
ProfileImage = Conf(source="imagefull", dest="image", format="JPEG",
quality="85", width=360, height=0, extension="jpg",
condition=CheckDeafult)
configuration.imageProfiles = [ProfileImage]
The default image settings for conversions.
"""
from nive_cms.i18n import _
from nive.definitions import StagPageElement, ObjectConf, FieldConf, Conf
from nive_cms.baseobjects import PageElementFileBase
from nive_cms.extensions.images import PILloaded
class image(PageElementFileBase):
"""
"""
# bw 0.9.11
def Span(self):
# css class span for the css selection
if self.data.cssClass=="teaserl":
return u"span4"
elif self.data.cssClass=="teasers":
return u"span2"
return u"span3"
# image type definition ------------------------------------------------------------------
#@nive_module
configuration = ObjectConf(
id = "image",
name = _(u"Image"),
dbparam = "images",
context = "nive_cms.image.image",
template = "image.pt",
selectTag = StagPageElement,
extensions = [],
icon = "nive_cms.cmsview:static/images/types/image.png",
description = _(u"The image element inserts images into the web page.")
)
configuration.data = [
FieldConf(id="image", datatype="file", size=0, default=u"", name=_(u"Imagefile")),
FieldConf(id="imagefull", datatype="file", size=0, default=u"", name=_(u"Imagefile fullsize")),
FieldConf(id="textblock", datatype="htext",size=100000,default=u"", name=_(u"Text"), fulltext=1, required=0),
FieldConf(id="cssClass", datatype="list", size=10, default=u"", name=_(u"Styling"), listItems=()),
FieldConf(id="link", datatype="url", size=1000, default=u"", name=_(u"Link"))
]
if PILloaded and "nive_cms.extensions.images.ImageProcessor" in configuration.extensions:
fields = ["title", "imagefull", "textblock", "cssClass", "link", "pool_groups"]
else:
fields = ["title", "image", "imagefull", "textblock", "cssClass", "link", "pool_groups"]
configuration.forms = {"create": {"fields":fields}, "edit": {"fields":fields}}
configuration.toJson = ("title", "image", "imagefull", "textblock", "cssClass", "link", "pool_groups", "pool_type", "pool_filename")
configuration.views = []
ProfileImage = Conf(source="imagefull", dest="image", format="JPEG", quality="90", width=360, height=0, extension="jpg")
configuration.imageProfiles = [ProfileImage]
| nive/nive_cms | nive_cms/image.py | Python | gpl-3.0 | 2,927 | 0.015032 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .Menu import MenuBox | Loki88/RSA-Test | ui/Menu/__init__.py | Python | agpl-3.0 | 72 | 0.013889 |
import unittest
import unittest.mock
import functools
from g1.asyncs import kernels
from g1.operations.databases.bases import interfaces
from g1.operations.databases.servers import connections
# I am not sure why pylint cannot lint contextlib.asynccontextmanager
# correctly; let us disable this check for now.
#
# pylint: disable=not-async-context-manager
def synchronous(test_method):
@kernels.with_kernel
@functools.wraps(test_method)
def wrapper(self):
kernels.run(test_method(self))
return wrapper
class ConnectionsTest(unittest.TestCase):
def setUp(self):
super().setUp()
self.conn = unittest.mock.Mock()
self.tx = self.conn.begin.return_value
self.manager = connections.ConnectionManager(self.conn)
unittest.mock.patch.multiple(
connections,
_WAIT_FOR_READER=0.01,
_WAIT_FOR_WRITER=0.01,
).start()
def tearDown(self):
unittest.mock.patch.stopall()
super().tearDown()
def assert_manager(
self,
num_readers,
tx_id,
rollback_tx_ids,
commit_tx_ids,
timeout_tx_ids,
):
self.assertEqual(self.manager._num_readers, num_readers)
self.assertEqual(self.manager._tx_id, tx_id)
self.assertEqual(tuple(self.manager._rollback_tx_ids), rollback_tx_ids)
self.assertEqual(tuple(self.manager._commit_tx_ids), commit_tx_ids)
self.assertEqual(tuple(self.manager._timeout_tx_ids), timeout_tx_ids)
self.assertEqual(self.manager.tx_id, tx_id)
@synchronous
async def test_reading(self):
self.assert_manager(0, 0, (), (), ())
async with self.manager.reading() as conn_1:
self.assert_manager(1, 0, (), (), ())
self.assertIs(conn_1, self.conn)
async with self.manager.reading() as conn_2:
self.assert_manager(2, 0, (), (), ())
self.assertIs(conn_2, self.conn)
async with self.manager.reading() as conn_3:
self.assert_manager(3, 0, (), (), ())
self.assertIs(conn_3, self.conn)
self.assert_manager(2, 0, (), (), ())
self.assert_manager(1, 0, (), (), ())
self.assert_manager(0, 0, (), (), ())
self.conn.begin.assert_not_called()
@synchronous
async def test_reading_timeout(self):
self.assert_manager(0, 0, (), (), ())
async with self.manager.transacting():
tx_id = self.manager.tx_id
with self.assertRaises(interfaces.TransactionTimeoutError):
async with self.manager.reading():
pass
self.assert_manager(0, 0, (), (tx_id, ), ())
self.conn.begin.assert_called_once()
@synchronous
async def test_writing(self):
with self.assertRaises(interfaces.InvalidRequestError):
async with self.manager.writing(0):
pass
with self.assertRaises(interfaces.TransactionNotFoundError):
async with self.manager.writing(1):
pass
self.assert_manager(0, 0, (), (), ())
async with self.manager.transacting():
tx_id = self.manager.tx_id
self.assert_manager(0, tx_id, (), (), ())
async with self.manager.writing(tx_id) as conn:
self.assert_manager(0, tx_id, (), (), ())
self.assertIs(conn, self.conn)
with self.assertRaises(interfaces.TransactionNotFoundError):
async with self.manager.writing(tx_id + 1):
pass
self.assert_manager(0, 0, (), (tx_id, ), ())
self.conn.begin.assert_called_once()
@synchronous
async def test_transacting(self):
self.assert_manager(0, 0, (), (), ())
async with self.manager.transacting() as conn:
tx_id = self.manager.tx_id
self.assertNotEqual(tx_id, 0)
self.assert_manager(0, tx_id, (), (), ())
self.assertIs(conn, self.conn)
self.assert_manager(0, 0, (), (tx_id, ), ())
self.conn.begin.assert_called_once()
@synchronous
async def test_transacting_rollback(self):
self.assert_manager(0, 0, (), (), ())
with self.assertRaises(ValueError):
async with self.manager.transacting():
tx_id = self.manager.tx_id
raise ValueError
self.assert_manager(0, 0, (tx_id, ), (), ())
self.conn.begin.assert_called_once()
@synchronous
async def test_transacting_timeout_on_reader(self):
self.assert_manager(0, 0, (), (), ())
async with self.manager.reading():
with self.assertRaises(interfaces.TransactionTimeoutError):
async with self.manager.transacting():
pass
self.assert_manager(0, 0, (), (), ())
self.conn.begin.assert_not_called()
@synchronous
async def test_transacting_timeout_on_writer(self):
self.assert_manager(0, 0, (), (), ())
async with self.manager.transacting():
tx_id = self.manager.tx_id
with self.assertRaises(interfaces.TransactionTimeoutError):
async with self.manager.transacting():
pass
self.assert_manager(0, 0, (), (tx_id, ), ())
self.conn.begin.assert_called_once()
@synchronous
async def test_begin(self):
with self.assertRaises(interfaces.InvalidRequestError):
await self.manager.begin(0)
self.assert_manager(0, 0, (), (), ())
conn = await self.manager.begin(1)
for _ in range(3): # begin is idempotent.
self.assertIs(await self.manager.begin(1), conn)
self.assertIs(conn, self.conn)
self.assert_manager(0, 1, (), (), ())
with self.assertRaises(interfaces.TransactionTimeoutError):
await self.manager.begin(2)
self.conn.begin.assert_called_once()
@synchronous
async def test_end(self):
with self.assertRaises(interfaces.InvalidRequestError):
await self.manager.rollback(0)
with self.assertRaises(interfaces.InvalidRequestError):
await self.manager.commit(0)
with self.assertRaises(interfaces.TransactionNotFoundError):
await self.manager.rollback(1)
with self.assertRaisesRegex(AssertionError, r'expect x != 0'):
await self.manager.rollback_due_to_timeout()
with self.assertRaises(interfaces.TransactionNotFoundError):
await self.manager.commit(1)
self.assert_manager(0, 0, (), (), ())
await self.manager.begin(1)
self.assert_manager(0, 1, (), (), ())
with self.assertRaises(interfaces.TransactionNotFoundError):
self.manager.rollback(999)
with self.assertRaises(interfaces.TransactionNotFoundError):
self.manager.commit(999)
self.tx.rollback.assert_not_called()
for _ in range(3): # rollback is idempotent.
self.manager.rollback(1)
self.tx.rollback.assert_called_once()
self.assert_manager(0, 0, (1, ), (), ())
await self.manager.begin(2)
self.tx.commit.assert_not_called()
for _ in range(3): # commit is idempotent.
self.manager.commit(2)
self.tx.commit.assert_called_once()
self.assert_manager(0, 0, (1, ), (2, ), ())
self.tx.rollback.reset_mock()
await self.manager.begin(3)
self.manager.rollback_due_to_timeout()
self.tx.rollback.assert_called_once()
self.assert_manager(0, 0, (1, ), (2, ), (3, ))
await self.manager.begin(1)
with self.assertRaises(interfaces.TransactionTimeoutError):
async with self.manager.writing(3):
pass
with self.assertRaises(interfaces.TransactionNotFoundError):
async with self.manager.writing(4):
pass
if __name__ == '__main__':
unittest.main()
| clchiou/garage | py/g1/operations/databases/servers/tests/test_connections.py | Python | mit | 8,015 | 0 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def StorageIORMConfigOption(vim, *args, **kwargs):
'''Configuration setting ranges for IORMConfigSpec object.'''
obj = vim.client.factory.create('ns0:StorageIORMConfigOption')
# do some validation checking...
if (len(args) + len(kwargs)) < 3:
        raise IndexError('Expected at least 3 arguments, got: %d' % len(args))
required = [ 'congestionThresholdOption', 'enabledOption', 'statsCollectionEnabledOption' ]
optional = [ 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
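# Typical call shape (sketch only; the option objects are normally retrieved
# from the server rather than built by hand):
#
#     opt = StorageIORMConfigOption(vim, congestion_opt, enabled_opt, stats_opt)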
| xuru/pyvisdk | pyvisdk/do/storage_iorm_config_option.py | Python | mit | 1,080 | 0.009259 |
from os.path import dirname, join
import unittest
from cvsgit.cvs import CVS
from cvsgit.changeset import Change
class Test(unittest.TestCase):
def test_rcsfilename(self):
"""Find the RCS file for a working copy path.
"""
cvs = CVS(join(dirname(__file__), 'data', 'zombie'), None)
c = Change(timestamp='',
author='',
log='',
filestatus='',
filename='patches/patch-Makefile',
revision='',
state='',
mode='')
expected = join(cvs.root, 'patches/Attic/patch-Makefile,v')
actual = cvs.rcsfilename(c)
self.assertEqual(expected, actual)
| ustuehler/git-cvs | tests/test_cvs.py | Python | isc | 727 | 0.001376 |
#!/usr/bin/python
import re,cgi,cgitb,sys
import os
import urllib
import Cookie
import datetime
import meowaux as mew
cgitb.enable()
login_signup="""
<ul class='nav navbar-nav' style='float:right; margin-top:7px; margin-right:5px; ' >
<li>
<form action='/login' style='display:inline;' >
<button class='btn btn-success' type='submit'>Log in</button>
</form>
<form action='/register' style='display:inline;' >
<button class='btn btn-warning' type='submit'>Register!</button>
</form>
</li>
</ul>
"""
cookie = Cookie.SimpleCookie()
cookie_hash = mew.getCookieHash( os.environ )
msg,msgType = mew.processCookieMessage( cookie, cookie_hash )
loggedInFlag = False
if ( ("userId" in cookie_hash) and ("sessionId" in cookie_hash) and
(mew.authenticateSession( cookie_hash["userId"], cookie_hash["sessionId"] ) != 0) ):
loggedInFlag = True
template = mew.slurp_file("template/about.html")
nav = mew.slurp_file("template/navbarflush_template.html")
footer = mew.slurp_file("template/footer_template.html")
analytics = mew.slurp_file("template/analytics_template.html")
tmp_str = mew.replaceTemplateMessage( template, msg, "nominal" )
tmp_str = tmp_str.replace( "<!--FOOTER-->", footer)
tmp_str = tmp_str.replace( "<!--ANALYTICS-->", analytics)
if loggedInFlag:
userData = mew.getUser( cookie_hash["userId"] )
nav = mew.processLoggedInNavTemplate( nav, userData["userName"], userData["type"] )
else:
nav = nav.replace( "<!--NAVBAR_USER_CONTEXT-->", login_signup )
tmp_str = tmp_str.replace( "<!--NAVBAR_FLUSH-->", nav)
print "Content-type: text/html; charset=utf-8;"
print cookie.output()
print
print tmp_str
| abetusk/www.meowcad.com | cgi/about.py | Python | agpl-3.0 | 1,664 | 0.020433 |
#
# Sub-module containing nested samplers
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
import pints
import numpy as np
try:
from scipy.special import logsumexp
except ImportError: # pragma: no cover
# Older versions
from scipy.misc import logsumexp
class NestedSampler(pints.TunableMethod):
"""
Abstract base class for nested samplers.
Parameters
----------
log_prior : pints.LogPrior
A logprior to draw proposal samples from.
"""
def __init__(self, log_prior):
# Store logprior
if not isinstance(log_prior, pints.LogPrior):
raise ValueError('Given log_prior must extend pints.LogPrior')
# prior accessed by subclasses to do prior sampling in ask() step
self._log_prior = log_prior
# Current value of the threshold log-likelihood value
self._running_log_likelihood = -float('inf')
self._proposed = None
# Initialise active point containers
self._n_active_points = 400
self._n_parameters = self._log_prior.n_parameters()
self._m_active = np.zeros((self._n_active_points,
self._n_parameters + 1))
self._min_index = None
self._accept_count = 0
self._n_evals = 0
def active_points(self):
"""
Returns the active points from nested sampling run.
"""
return self._m_active
def ask(self):
"""
Proposes new point at which to evaluate log-likelihood.
"""
raise NotImplementedError
def _initialise_active_points(self, m_initial, v_fx):
"""
Sets initial active points matrix.
"""
for i, fx in enumerate(v_fx):
self._m_active[i, self._n_parameters] = fx
self._m_active[:, :-1] = m_initial
self._min_index = np.argmin(self._m_active[:, self._n_parameters])
self._set_running_log_likelihood(
self._m_active[self._min_index, self._n_parameters])
def in_initial_phase(self):
"""
For methods that need an initial phase (see
:meth:`needs_initial_phase()`), this method returns ``True`` if the
method is currently configured to be in its initial phase. For other
methods a ``NotImplementedError`` is returned.
"""
raise NotImplementedError
def min_index(self):
""" Returns index of sample with lowest log-likelihood. """
return self._min_index
def n_active_points(self):
"""
Returns the number of active points that will be used in next run.
"""
return self._n_active_points
def n_hyper_parameters(self):
""" See :meth:`TunableMethod.n_hyper_parameters()`. """
raise NotImplementedError
def name(self):
""" Name of sampler """
raise NotImplementedError
def needs_sensitivities(self):
"""
Determines whether sampler uses sensitivities of the solution.
"""
return self._needs_sensitivities
def needs_initial_phase(self):
"""
Returns ``True`` if this method needs an initial phase, for example
ellipsoidal nested sampling has a period of running rejection
sampling before it starts to fit ellipsoids to points.
"""
return False
def running_log_likelihood(self):
"""
Returns current value of the threshold log-likelihood value.
"""
return self._running_log_likelihood
def set_n_active_points(self, active_points):
"""
Sets the number of active points for the next run.
"""
active_points = int(active_points)
if active_points <= 5:
raise ValueError('Number of active points must be greater than 5.')
self._n_active_points = active_points
self._m_active = np.zeros((self._n_active_points,
self._n_parameters + 1))
def set_hyper_parameters(self, x):
"""
See :meth:`TunableMethod.set_hyper_parameters()`.
"""
raise NotImplementedError
def set_initial_phase(self, in_initial_phase):
"""
For methods that need an initial phase (see
:meth:`needs_initial_phase()`), this method toggles the initial phase
algorithm. For other methods a ``NotImplementedError`` is returned.
"""
raise NotImplementedError
def _set_running_log_likelihood(self, running_log_likelihood):
"""
Updates the current value of the threshold log-likelihood value.
"""
self._running_log_likelihood = running_log_likelihood
def tell(self, fx):
"""
If a single evaluation is provided as arguments, a single point is
accepted and returned if its likelihood exceeds the current threshold;
otherwise None is returned.
If multiple evaluations are provided as arguments (for example, if
running the algorithm in parallel), None is returned if no points
have likelihood exceeding threshold; if a single point passes the
threshold, it is returned; if multiple points pass, one is selected
uniformly at random and returned and the others are stored for later
use.
In all cases, two objects are returned: the proposed point (which may
be None) and an array of other points that also pass the threshold
(which is empty for single evaluation mode but may be non-empty for
multiple evaluation mode).
"""
# for serial evaluation just return point or None and an empty array
if np.isscalar(fx):
self._n_evals += 1
if np.isnan(fx) or fx < self._running_log_likelihood:
return None, np.array([[]])
else:
proposed = self._proposed
fx_temp = fx
winners = np.array([[]])
# if running in parallel, then fx will be a sequence
else:
a_len = len(fx)
self._n_evals += a_len
results = []
for i in range(a_len):
if np.isnan(fx[i]) or fx[i] < self._running_log_likelihood:
results.append(None)
else:
results.append(fx[i])
n_non_none = sum(x is not None for x in results)
# if none pass threshold return None and an empty array
if n_non_none == 0:
return None, np.array([[]])
# if one passes then return it and an empty array
elif n_non_none == 1:
fx_temp = next(item for item in results if item is not None)
index = results.index(fx_temp)
proposed = self._proposed[index]
winners = np.array([[]])
# if more than a single point passes select at random from multiple
# non-nones and return it and an array of the other points whose
# likelihood exceeds threshold
else:
fx_short = [i for i in results if i]
idex = [results.index(i) for i in fx_short]
proposed_short = [self._proposed[i] for i in idex]
fx_temp = np.random.choice(fx_short)
index_temp = results.index(fx_temp)
proposed = self._proposed[index_temp]
index1 = fx_short.index(fx_temp)
del proposed_short[index1]
fx_short.remove(fx_temp)
winners = np.transpose(
np.vstack([np.transpose(proposed_short), fx_short]))
self._m_active[self._min_index, :] = np.concatenate(
(proposed, np.array([fx_temp])))
self._min_index = np.argmin(
self._m_active[:, self._n_parameters])
self._set_running_log_likelihood(
np.min(self._m_active[:, self._n_parameters]))
self._accept_count += 1
return proposed, winners
class NestedController(object):
"""
Uses nested sampling to sample from a posterior distribution.
Parameters
----------
log_likelihood : pints.LogPDF
A :class:`LogPDF` function that evaluates points in the parameter
space.
log_prior : pints.LogPrior
A :class:`LogPrior` function on the same parameter space.
References
----------
.. [1] "Nested Sampling for General Bayesian Computation", John Skilling,
Bayesian Analysis 1:4 (2006).
https://doi.org/10.1214/06-BA127
.. [2] "Multimodal nested sampling: an efficient and robust alternative
to Markov chain Monte Carlo methods for astronomical data analyses"
F. Feroz and M. P. Hobson, 2008, Mon. Not. R. Astron. Soc.
"""
def __init__(self, log_likelihood, log_prior, method=None):
# Store log_likelihood and log_prior
# if not isinstance(log_likelihood, pints.LogLikelihood):
if not isinstance(log_likelihood, pints.LogPDF):
raise ValueError(
'Given log_likelihood must extend pints.LogLikelihood')
self._log_likelihood = log_likelihood
# Store function
if not isinstance(log_prior, pints.LogPrior):
raise ValueError('Given log_prior must extend pints.LogPrior')
self._log_prior = log_prior
# Get dimension
self._n_parameters = self._log_likelihood.n_parameters()
if self._n_parameters != self._log_prior.n_parameters():
raise ValueError(
'Given log_likelihood and log_prior must have same number of'
' parameters.')
# Logging
self._log_to_screen = True
self._log_filename = None
self._log_csv = False
# By default do serial evaluation
self._parallel = False
self._n_workers = 1
self.set_parallel()
# Parameters common to all routines
# Total number of iterations
self._iterations = 1000
# Total number of posterior samples
self._posterior_samples = 1000
# Convergence criterion in log-evidence
self._marginal_log_likelihood_threshold = 0.5
# Initial marginal difference
self._diff = np.float('-Inf')
# By default use ellipsoidal sampling
if method is None:
method = pints.NestedEllipsoidSampler
else:
try:
ok = issubclass(method, pints.NestedSampler)
except TypeError: # Not a class
ok = False
if not ok:
raise ValueError(
'Given method must extend pints.NestedSampler.'
)
self._sampler = method(log_prior=self._log_prior)
# Check if sensitivities are required
self._needs_sensitivities = self._sampler.needs_sensitivities()
# Performance metrics
self._time = None
# :meth:`run` can only be called once
self._has_run = False
def active_points(self):
"""
Returns the active points from nested sampling.
"""
return self._sampler.active_points()
def _diff_marginal_likelihood(self, i, d):
"""
Calculates difference in marginal likelihood between current and
previous iterations.
"""
v_temp = np.concatenate((
self._v_log_Z[0:(i - 1)],
[np.max(self._sampler._m_active[:, d])]
))
w_temp = np.concatenate((self._w[0:(i - 1)], [self._X[i]]))
self._diff = (
+ logsumexp(self._v_log_Z[0:(i - 1)], b=self._w[0:(i - 1)])
- logsumexp(v_temp, b=w_temp)
)
def effective_sample_size(self):
r"""
Calculates the effective sample size of posterior samples from a
nested sampling run using the formula:
.. math::
ESS = exp(-sum_{i=1}^{m} p_i log p_i),
in other words, the information. Given by eqn. (39) in [1]_.
"""
self._log_vP = (self._m_samples_all[:, self._n_parameters]
- self._log_Z + np.log(self._w))
return np.exp(-np.sum(self._vP * self._log_vP))
def inactive_points(self):
"""
Returns the inactive points from nested sampling.
"""
return self._m_inactive
def _initialise_callable(self):
"""
Initialises sensitivities if they are needed; otherwise, returns
a callable log likelihood.
"""
f = self._log_likelihood
if self._needs_sensitivities:
f = f.evaluateS1
return f
def _initialise_evaluator(self, f):
"""
Initialises parallel runners, if desired.
"""
# Create evaluator object
if self._parallel:
# Use at most n_workers workers
n_workers = self._n_workers
evaluator = pints.ParallelEvaluator(
f, n_workers=n_workers)
else:
evaluator = pints.SequentialEvaluator(f)
return evaluator
def _initialise_logger(self):
"""
Initialises logger.
"""
# Start logging
self._logging = self._log_to_screen or self._log_filename
if self._logging:
if self._log_to_screen:
# Show current settings
print('Running ' + self._sampler.name())
print('Number of active points: ' +
str(self._n_active_points))
print('Total number of iterations: ' + str(self._iterations))
print('Total number of posterior samples: ' + str(
self._posterior_samples))
# Set up logger
self._logger = pints.Logger()
if not self._log_to_screen:
self._logger.set_stream(None)
if self._log_filename:
self._logger.set_filename(
self._log_filename, csv=self._log_csv)
# Add fields to log
self._logger.add_counter('Iter.', max_value=self._iterations)
self._logger.add_counter('Eval.', max_value=self._iterations * 10)
self._logger.add_time('Time m:s')
self._logger.add_float('Delta_log(z)')
self._logger.add_float('Acceptance rate')
def _initial_points(self):
"""
Generates initial active points.
"""
m_initial = self._log_prior.sample(self._n_active_points)
v_fx = np.zeros(self._n_active_points)
for i in range(0, self._n_active_points):
# Calculate likelihood
v_fx[i] = self._evaluator.evaluate([m_initial[i, :]])[0]
self._sampler._n_evals += 1
# Show progress
if self._logging and i >= self._next_message:
# Log state
self._logger.log(0, self._sampler._n_evals,
self._timer.time(), self._diff, 1.0)
# Choose next logging point
if i > self._message_warm_up:
self._next_message = self._message_interval * (
1 + i // self._message_interval)
self._next_message = 0
return v_fx, m_initial
def iterations(self):
"""
Returns the total number of iterations that will be performed in the
next run.
"""
return self._iterations
def log_likelihood_vector(self):
"""
Returns vector of log likelihoods for each of the stacked
``[m_active, m_inactive]`` points.
"""
return self._m_samples_all[:, -1]
def marginal_log_likelihood(self):
"""
Calculates the marginal log likelihood of nested sampling run.
"""
# Include active particles in sample
m_active = self._sampler.active_points()
self._v_log_Z[self._iterations] = logsumexp(m_active[:,
self._n_parameters])
self._w[self._iterations:] = float(self._X[self._iterations]) / float(
self._sampler.n_active_points())
self._m_samples_all = np.vstack((self._m_inactive, m_active))
# Determine log evidence
log_Z = logsumexp(self._v_log_Z,
b=self._w[0:(self._iterations + 1)])
self._log_Z_called = True
return log_Z
def marginal_log_likelihood_standard_deviation(self):
"""
Calculates standard deviation in marginal log likelihood as in [2]_.
"""
if not self._log_Z_called:
self.marginal_log_likelihood()
log_L_minus_Z = self._v_log_Z - self._log_Z
log_Z_sd = logsumexp(log_L_minus_Z,
b=self._w[0:(self._iterations + 1)] *
log_L_minus_Z)
log_Z_sd = np.sqrt(log_Z_sd / self._sampler.n_active_points())
return log_Z_sd
def marginal_log_likelihood_threshold(self):
"""
Returns threshold for determining convergence in estimate of marginal
log likelihood which leads to early termination of the algorithm.
"""
return self._marginal_log_likelihood_threshold
def n_posterior_samples(self):
"""
Returns the number of posterior samples that will be returned (see
:meth:`set_n_posterior_samples()`).
"""
return self._posterior_samples
def parallel(self):
"""
Returns the number of parallel worker processes this routine will be
run on, or ``False`` if parallelisation is disabled.
"""
return self._n_workers if self._parallel else False
def posterior_samples(self):
"""
Returns posterior samples generated during run of nested
sampling object.
"""
return self._m_posterior_samples
def prior_space(self):
"""
Returns a vector of X samples which approximates the proportion
of prior space compressed.
"""
return self._X
def run(self):
"""
        Runs the nested sampling routine and returns the posterior samples;
        the marginal likelihood estimate is available afterwards via
        :meth:`marginal_log_likelihood()`.
"""
# Can only run once for each controller instance
if self._has_run:
raise RuntimeError("Controller is valid for single use only")
self._has_run = True
# Choose method to evaluate
f = self._initialise_callable()
# Set parallel
self._evaluator = self._initialise_evaluator(f)
# Set number of active points
self._n_active_points = self._sampler.n_active_points()
# Start timing
self._timer = pints.Timer()
# Set up progress reporting
self._next_message = 0
self._message_warm_up = 0
self._message_interval = 20
self._initialise_logger()
d = self._n_parameters
v_fx, m_initial = self._initial_points()
self._sampler._initialise_active_points(m_initial, v_fx)
# store all inactive points, along with their respective
# log-likelihoods (hence, d+1)
self._m_inactive = np.zeros((self._iterations, d + 1))
# store weights
self._w = np.zeros(self._n_active_points + self._iterations)
# store X values (defined in [1])
self._X = np.zeros(self._iterations + 1)
self._X[0] = 1
# log marginal likelihood holder
self._v_log_Z = np.zeros(self._iterations + 1)
# Run!
self._X[0] = 1.0
self._i_message = 0
i_winners = 0
m_previous_winners = []
for i in range(0, self._iterations):
i_iter_complete = 0
self._i = i
a_min_index = self._sampler.min_index()
self._X[i + 1] = np.exp(-(i + 1) / self._n_active_points)
if i > 0:
self._w[i] = 0.5 * (self._X[i - 1] - self._X[i + 1])
else:
self._w[i] = self._X[i] - self._X[i + 1]
self._v_log_Z[i] = self._sampler.running_log_likelihood()
self._m_inactive[i, :] = self._sampler._m_active[a_min_index, :]
# check whether previous winners exceed threshold
if i_winners > 0:
m_previous_winners = m_previous_winners[(
m_previous_winners[:, self._n_parameters] >
self._sampler.running_log_likelihood()), :]
if m_previous_winners.shape[0] > 0:
index = np.random.choice(m_previous_winners.shape[0],
1, replace=False)
proposed = m_previous_winners[index, :self._n_parameters]
fx_temp = m_previous_winners[index, self._n_parameters]
m_previous_winners = np.delete(m_previous_winners,
index, 0)
self._sampler._m_active[self._sampler._min_index, :] = (
np.concatenate((proposed[0], fx_temp))
)
self._sampler._min_index = np.argmin(
self._sampler._m_active[:, self._n_parameters])
self._sampler._set_running_log_likelihood(
np.min(self._sampler._m_active[:, self._n_parameters])
)
self._sampler._accept_count += 1
i_iter_complete = 1
if i_iter_complete == 0:
# Propose new samples
proposed = self._sampler.ask(self._n_workers)
# Evaluate their fit
if self._n_workers > 1:
log_likelihood = self._evaluator.evaluate(proposed)
else:
log_likelihood = self._evaluator.evaluate([proposed])[0]
sample, winners = self._sampler.tell(log_likelihood)
while sample is None:
proposed = self._sampler.ask(self._n_workers)
if self._n_workers > 1:
log_likelihood = ( # pragma: no cover
self._evaluator.evaluate(proposed))
else:
log_likelihood = self._evaluator.evaluate(
[proposed])[0]
sample, winners = self._sampler.tell(log_likelihood)
if winners.size > 0:
if i_winners == 0:
m_previous_winners = winners
i_winners = 1
else:
m_previous_winners = [m_previous_winners, winners]
m_previous_winners = np.concatenate(m_previous_winners)
# Check whether within convergence threshold
if i > 2:
self._diff_marginal_likelihood(i, d)
if (np.abs(self._diff) <
self._marginal_log_likelihood_threshold):
if self._log_to_screen:
print( # pragma: no cover
'Convergence obtained with Delta_z = ' +
str(self._diff))
# shorten arrays according to current iteration
self._iterations = i
self._v_log_Z = self._v_log_Z[0:(self._iterations + 1)]
self._w = self._w[0:(
self._n_active_points + self._iterations)]
self._X = self._X[0:(self._iterations + 1)]
self._m_inactive = self._m_inactive[0:self._iterations, :]
break
# Show progress
self._update_logger()
# Calculate log_evidence and uncertainty
self._log_Z = self.marginal_log_likelihood()
self._log_Z_sd = self.marginal_log_likelihood_standard_deviation()
# Draw samples from posterior
n = self._posterior_samples
self._m_posterior_samples = self.sample_from_posterior(n)
# Stop timer
self._time = self._timer.time()
return self._m_posterior_samples
def sample_from_posterior(self, posterior_samples):
"""
Draws posterior samples based on nested sampling run using importance
sampling. This function is automatically called in
``NestedController.run()`` but can also be called afterwards to obtain
new posterior samples.
"""
if posterior_samples < 1:
raise ValueError('Number of posterior samples must be positive.')
# Calculate probabilities (can this be used to calculate effective
# sample size as in importance sampling?) of each particle
self._vP = np.exp(self._m_samples_all[:, self._n_parameters]
- self._log_Z) * self._w
# Draw posterior samples
m_theta = self._m_samples_all[:, :-1]
vIndex = np.random.choice(
range(0, self._iterations + self._sampler.n_active_points()),
size=posterior_samples, p=self._vP)
m_posterior_samples = m_theta[vIndex, :]
return m_posterior_samples
def set_iterations(self, iterations):
"""
Sets the total number of iterations to be performed in the next run.
"""
iterations = int(iterations)
if iterations < 0:
raise ValueError('Number of iterations cannot be negative.')
self._iterations = iterations
def set_log_to_file(self, filename=None, csv=False):
"""
Enables logging to file when a filename is passed in, disables it if
``filename`` is ``False`` or ``None``.
The argument ``csv`` can be set to ``True`` to write the file in comma
separated value (CSV) format. By default, the file contents will be
similar to the output on screen.
"""
if filename:
self._log_filename = str(filename)
self._log_csv = True if csv else False
else:
self._log_filename = None
self._log_csv = False
def set_log_to_screen(self, enabled):
"""
Enables or disables logging to screen.
"""
self._log_to_screen = True if enabled else False
def set_marginal_log_likelihood_threshold(self, threshold):
"""
Sets threshold for determining convergence in estimate of marginal
log likelihood which leads to early termination of the algorithm.
"""
if threshold <= 0:
raise ValueError('Convergence threshold must be positive.')
self._marginal_log_likelihood_threshold = threshold
def set_parallel(self, parallel=False):
"""
Enables/disables parallel evaluation.
If ``parallel=True``, the method will run using a number of worker
processes equal to the detected cpu core count. The number of workers
can be set explicitly by setting ``parallel`` to an integer greater
than 0.
Parallelisation can be disabled by setting ``parallel`` to ``0`` or
``False``.
"""
if parallel is True:
self._parallel = True
self._n_workers = pints.ParallelEvaluator.cpu_count()
elif parallel >= 1:
self._parallel = True
self._n_workers = int(parallel)
else:
self._parallel = False
self._n_workers = 1
def set_n_posterior_samples(self, posterior_samples):
"""
Sets the number of posterior samples to generate from points proposed
by the nested sampling algorithm.
"""
posterior_samples = int(posterior_samples)
if posterior_samples < 1:
raise ValueError(
'Number of posterior samples must be greater than zero.')
self._posterior_samples = posterior_samples
def time(self):
"""
Returns the time needed for the last run, in seconds, or ``None`` if
the controller hasn't run yet.
"""
return self._time
def _update_logger(self):
"""
Updates logger if necessary.
"""
# print(self._i_message)
# print(self._next_message)
if self._logging:
self._i_message += 1
if self._i_message >= self._next_message:
# Log state
self._logger.log(self._i_message, self._sampler._n_evals,
self._timer.time(), self._diff,
float(self._sampler._accept_count /
(self._sampler._n_evals -
self._sampler._n_active_points)))
# Choose next logging point
if self._i_message > self._message_warm_up:
self._next_message = self._message_interval * (
1 + self._i_message // self._message_interval)
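# Minimal usage sketch (names are illustrative; any pints.LogPDF / pints.LogPrior
# pair with matching dimensions will do):
#
#     controller = NestedController(log_likelihood, log_prior)
#     controller.set_iterations(2000)
#     controller.set_n_posterior_samples(500)
#     posterior_samples = controller.run()
#     log_evidence = controller.marginal_log_likelihood()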
| martinjrobins/hobo | pints/_nested/__init__.py | Python | bsd-3-clause | 29,201 | 0 |
# -*- coding: utf-8 -*-
# Copyright 2016 ABF OSIELL <http://osiell.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, SUPERUSER_ID
def migrate_res_users_role(env):
"""Migrate user roles database schema.
('res_users_role_user_rel' many2many table to 'res.users.role.line' model.
"""
role_line_model = env['res.users.role.line']
query = "SELECT role_id, user_id FROM res_users_role_user_rel;"
env.cr.execute(query)
rows = env.cr.fetchall()
for row in rows:
vals = {
'role_id': row[0],
'user_id': row[1],
}
role_line_model.create(vals)
def migrate(cr, version):
env = api.Environment(cr, SUPERUSER_ID, {})
migrate_res_users_role(env)
| rossasa/server-tools | base_user_role/migrations/8.0.1.1.0/post-migration.py | Python | agpl-3.0 | 766 | 0 |
from lcapy import Circuit
cct = Circuit("""
V 1 0 step 10; down
L 1 2 1e-3; right, size=1.2
C 2 3 1e-4; right, size=1.2
R 3 0_1 1; down
W 0 0_1; right
""")
import numpy as np
t = np.linspace(0, 0.01, 1000)
vr = cct.R.v.evaluate(t)
from matplotlib.pyplot import subplots, savefig
fig, ax = subplots(1)
ax.plot(t, vr, linewidth=2)
ax.set_xlabel('Time (s)')
ax.set_ylabel('Resistor voltage (V)')
ax.grid(True)
savefig('circuit-VRLC2-vr.png')
| mph-/lcapy | doc/examples/netlists/circuit-VRLC2-vr.py | Python | lgpl-2.1 | 442 | 0.004525 |
# -*- coding: utf-8 -*-
from django.db import models
class Person(models.Model):
first_name = models.CharField(max_length=255)
last_name = models.CharField(max_length=255)
age = models.CharField(max_length=10)
def __unicode__(self):
return self.first_name
class PersonFile(models.Model):
filefield = models.FileField(upload_to='test')
def __unicode__(self):
return self.filefield
class Mercado(models.Model):
item = models.CharField(max_length=50)
qtde = models.IntegerField(default=0)
def __unicode__(self):
return self.item
class Invoice(models.Model):
name = models.CharField(max_length=50)
sales_date = models.DateField()
price = models.FloatField()
def __unicode__(self):
return self.name
class ItemInvoice(models.Model):
invoice = models.ForeignKey(Invoice)
name = models.CharField(max_length=50)
def __unicode__(self):
return self.name
| valdergallo/django-chrono | example/models.py | Python | gpl-3.0 | 963 | 0 |
from multiprocessing import Process, JoinableQueue, Manager, Lock, Value, Event
import wiringpi as wp
import RPi.GPIO as rpio
from slaveprocess import SlaveProcess
import time
rpio.setmode(rpio.BCM)
class PWMProcess(Process):
def __init__(self,**kwargs):
super(PWMProcess, self).__init__(**kwargs)
self.event_enable_pwm = kwargs['event_enable_pwm']
self.event_terminate = kwargs['event_terminate']
self.pwm_freq = kwargs['pwm_freq']
self.pwm_duty = kwargs['pwm_duty']
self.lock_freq = kwargs['lock_freq']
self.pin = kwargs['pin']
    def run(self):
        # Software-PWM loop: while enabled, read the shared frequency/duty values
        # and bit-bang the output pin. The toggling below is an assumed completion
        # of the unfinished loop body and treats pwm_duty as a fraction in [0, 1].
        while self.event_enable_pwm.is_set():
            start_clock = time.time()
            with self.lock_freq:
                pwm_freq = self.pwm_freq.value
                pwm_duty = self.pwm_duty.value
            period = 1. / pwm_freq
            wp.digitalWrite(self.pin, 1)
            time.sleep(period * pwm_duty)
            wp.digitalWrite(self.pin, 0)
            time.sleep(max(0.0, period - (time.time() - start_clock)))
class DriveCtrl():
def __init__(self, **kwargs):
self.cfg = kwargs['config']
self.queues = kwargs['queues']
## motor parameters :
self.speeds = (10,20,50,100)
if self.cfg.lookup('drive.speeds') is not None:
self.speeds = tuple([max(100,x) for x in self.cfg.lookup('drive.speeds')])
self.max_speed = max(self.speeds)
self.nb_speeds = len(self.speeds)
self.current_speed = self.speeds[0]
self.queues['log'].put('drive:nb speeds : %d'%(self.nb_speeds))
## pins :
self.power_pins={'L':0,'R':0}
self.direction_pins = {'L':0,'R':0}
self.monitor_pins={'LF':0,'LB':0,'RB':0,'RF':0}
self.pin_power_left = 0
self.pin_power_right = 0
self.pin_direction_left = 0
self.pin_direction_right = 0
## PWM options :
if self.cfg.lookup('gpio.pwm_freq'):
self.pwm_freq = float(self.cfg.gpio.pwm_freq)
else:
self.pwm_freq = 50.0
###################### DEFAULT DRIVE VECTORS #######################
#################################
# COMMANDS
#################################
## Drive commands :
# North :
# _ _
# ^ | |_____| | ^ | |x| |
# | | | ^ | | | | | | |
# 1.0 | | |__^__| | | 1.0 | | | |
# | |_| |_| |
#
# North East :
# _ _
# ^ | |_ _ _| | | | |x|
# | | | ^ | | ^ | | | |
# 0.8 | | |__^__| | | 0.2 | | | |
# | |_| |_|
#
# East :
# _ _
# ^ | |_____| | | | | | |
# | | | ^ | | | | | |x|
# 1.0 | | |__^__| | | 1.0 | | | |
# | |_| |_| v
#
# South East :
# _ _
# | | |_____| | | | | |
# | | | ^ | | | | | | |
# 1.0 | | |__^__| | v 0.8 | | |x|
# v |_| |_|
#
# South :
# _ _
# | | |_____| | | | | | |
# | | | ^ | | | | | | |
# 1.0 | | |__^__| | | 1.0 | |x| |
# v |_| |_| v
#
# South West :
# _ _
# | |_____| | | | | | |
# | | ^ | | | | | | |
# 0.2 | | |__^__| | | 0.8 |x| | |
# v |_| |_| v
#
# West :
# _ _
# | | |_____| | ^ | | | |
# | | | ^ | | | |x| | |
# 1.0 | | |__^__| | | 1.0 | | | |
# v |_| |_| |
#
# North West :
# _ _
# ^ | |_____| | ^ |x| | |
# | | | ^ | | | | | | |
# 0.2 | |__^__| | | 0.8 | | | |
# |_| |_| |
#
# Full stop :
# _ _
# | |_____| | | | | |
# | | ^ | | | |x| |
# 0.0 | |__^__| | 0.0 | | | |
# |_| |_|
#
self.vec_north = (1.0,1.0,1,1,0,0)
self.vec_north_east = (0.8,0.2,1,1,0,0)
self.vec_east = (1.0,1.0,1,0,0,1)
self.vec_south_east = (0.8,0.2,0,0,1,1)
self.vec_south = (1.0,1.0,0,0,1,1)
self.vec_south_west = (0.2,0.8,0,0,1,1)
self.vec_west = (1.0,1.0,0,1,1,0)
self.vec_north_west = (0.2,0.8,1,1,0,0)
self.vec_full_stop = (0,0,0,0,0,0)
self.load_drive_vectors()
self.current_vector = self.vec_full_stop
## read the mapping of GPIO pins
self.read_gpio_map_from_config()
self.gpio_init()
self.dict_steer = {'8':self.vec_north, \
'9':self.vec_north_east, \
'6':self.vec_east,\
'3':self.vec_south_east,\
'2':self.vec_south,\
'1':self.vec_south_west,\
'4':self.vec_west,\
'7':self.vec_north_west,\
'5':self.vec_full_stop}
def load_drive_vectors(self):
for vecname in ['north','north_east','east','south_east','south','south_west','west','north_west']:
vecpath = 'drive.vectors.'+vecname
#self.queues['log'].put('drive: loading drive vector %s'%vecpath)
if self.cfg.lookup(vecpath) is not None:
vecarray = self.cfg.lookup(vecpath)
if len(vecarray) != 6:
self.queues['log'].put('drive:error: drive vector %s in config file'%(vecname))
setattr(self,'vec_'+vecname, tuple([x for x in vecarray]))
def read_gpio_map_from_config(self):
self.pin_power_left = self.cfg.gpio.pin_pwm_left
self.pin_power_right = self.cfg.gpio.pin_pwm_right
self.pin_direction_left_forward = self.cfg.gpio.pin_direction_left_forward
self.pin_direction_right_forward = self.cfg.gpio.pin_direction_right_forward
self.pin_direction_left_rear = self.cfg.gpio.pin_direction_left_rear
self.pin_direction_right_rear = self.cfg.gpio.pin_direction_right_rear
def gpio_init(self):
wp.wiringPiSetupSys()
# Set output for those pins :
wp.pinMode(self.pin_power_left, wp.OUTPUT)
wp.pinMode(self.pin_power_right, wp.OUTPUT)
wp.pinMode(self.pin_direction_left_forward, wp.OUTPUT)
wp.pinMode(self.pin_direction_right_forward, wp.OUTPUT)
wp.pinMode(self.pin_direction_left_rear, wp.OUTPUT)
wp.pinMode(self.pin_direction_right_rear, wp.OUTPUT)
## create the SoftPwm on power pins :
wp.softPwmCreate(self.pin_power_left, 0, self.max_speed)
wp.softPwmCreate(self.pin_power_right, 0, self.max_speed)
## reset everyone :
self.gpio_zero()
def rpio_init(self):
## open pins for output :
rpio.setup(self.pin_power_left, rpio.OUT)
rpio.setup(self.pin_power_right, rpio.OUT)
rpio.setup(self.pin_direction_left_forward, rpio.OUT)
rpio.setup(self.pin_direction_right_forward, rpio.OUT)
rpio.setup(self.pin_direction_left_rear, rpio.OUT)
rpio.setup(self.pin_direction_right_rear, rpio.OUT)
## open pins for input :
# disabled for now
## setup software pwm
self.pwm_left = rpio.PWM(self.pin_power_left, self.pwm_freq)
self.pwm_right = rpio.PWM(self.pin_power_right, self.pwm_freq)
self.pwm_left.start(0)
self.pwm_right.start(0)
def gpio_zero(self):
# set everyone to 0
wp.softPwmWrite(self.pin_power_left, 0)
wp.softPwmWrite(self.pin_power_right, 0)
wp.digitalWrite(self.pin_direction_left_forward, 0)
wp.digitalWrite(self.pin_direction_right_forward, 0)
wp.digitalWrite(self.pin_direction_left_rear, 0)
wp.digitalWrite(self.pin_direction_right_rear, 0)
def rpio_zero(self):
self.pwm_left.ChangeDutyCycle(0)
self.pwm_right.ChangeDutyCycle(0)
rpio.output(self.pin_direction_left_forward, 0)
rpio.output(self.pin_direction_right_forward, 0)
rpio.output(self.pin_direction_left_rear, 0)
rpio.output(self.pin_direction_right_rear, 0)
def gpio_steer(self, drive_vector):
wp.softPwmWrite(self.pin_power_left, int(self.current_speed*drive_vector[0]))
wp.softPwmWrite(self.pin_power_right, int(self.current_speed*drive_vector[1]))
wp.digitalWrite(self.pin_direction_left_forward, drive_vector[2])
wp.digitalWrite(self.pin_direction_right_forward, drive_vector[3])
wp.digitalWrite(self.pin_direction_left_rear, drive_vector[4])
wp.digitalWrite(self.pin_direction_right_rear, drive_vector[5])
        self.current_vector = drive_vector
        actual_vec = (int(self.current_speed*drive_vector[0]), int(self.current_speed*drive_vector[1]),drive_vector[2], drive_vector[3], drive_vector[4], drive_vector[5])
msg='drive:steering, drive vector: %s, ppl %d ppr %d pdlf %d pdrf %d pdlr %d pdrr %d'%(str(actual_vec),self.pin_power_left, self.pin_power_right, self.pin_direction_left_forward, self.pin_direction_right_forward, self.pin_direction_left_rear, self.pin_direction_right_rear)
self.queues['tx_msg'].put(msg)
self.queues['log'].put(msg)
def rpio_steer(self,drive_vector):
self.pwm_left.ChangeDutyCycle(self.current_speed*drive_vector[0])
self.pwm_right.ChangeDutyCycle(self.current_speed*drive_vector[1])
rpio.output(self.pin_direction_left_forward, drive_vector[2])
rpio.output(self.pin_direction_right_forward, drive_vector[3])
rpio.output(self.pin_direction_left_rear, drive_vector[4])
rpio.output(self.pin_direction_right_rear, drive_vector[5])
actual_vec = (int(self.current_speed*drive_vector[0]), int(self.current_speed*drive_vector[1]),drive_vector[2], drive_vector[3], drive_vector[4], drive_vector[5])
msg='drive:steering, drive vector: %s, ppl %d ppr %d pdlf %d pdrf %d pdlr %d pdrr %d\n'%(str(actual_vec),self.pin_power_left, self.pin_power_right, self.pin_direction_left_forward, self.pin_direction_right_forward, self.pin_direction_left_rear, self.pin_direction_right_rear)
self.current_vector = drive_vector
self.queues['tx_msg'].put(msg)
self.queues['log'].put(msg)
def rpio_cleanup(self):
self.pwm_left.stop()
self.pwm_right.stop()
rpio.cleanup()
def execute_drive_cmd(self,raw_cmd):
self.queues['log'].put("drive:executing cmd :%s"%raw_cmd)
if len(raw_cmd)>2:
if raw_cmd[1] == 'G':
## command 'DG[1-9]' : steering command
if raw_cmd[2] in self.dict_steer:
self.gpio_steer(self.dict_steer[raw_cmd[2]])
else:
self.queues['tx_msg'].put('drive:unknown steering command key \"%s\" (available : [1-9]).\n'%(raw_cmd[2]))
elif raw_cmd[1] == 'S':
## command 'DS[0-9]' : change speed
speed_setting = int(raw_cmd[2:])
if speed_setting >= 0:
self.current_speed = self.speeds[min(self.nb_speeds-1,speed_setting)]
self.gpio_steer(self.current_vector)
self.queues['log'].put('drive:current speed set to %s'%(str(self.current_speed)))
else:
self.queues['tx_msg'].put('drive:could not change speed setting to %d, must be positive'%(speed_setting))
elif raw_cmd[1] == 'M':
## command 'DM' : requesting monitoring data
pass
else:
self.queues['tx_msg'].put('drive:discarding malformed speed setting command \"%s\"\n'%raw_cmd)
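    # Command format (as parsed above): "DG<1-9>" steers using numpad-style
    # directions (e.g. "DG8" = north, "DG5" = full stop); "DS<n>" selects entry
    # n of self.speeds, clamped to the last entry.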
def checks(self, remote=False):
## check drive vectors :
for vecname in ['north','north_east','east','south_east','south','south_west','west','north_west']:
msg = 'drive:checking drive vector %s:%s'%(vecname,getattr(self,'vec_'+vecname).__repr__())
self.queues['log'].put(msg)
if remote:
self.queues['tx_msg'].put(msg)
## check speed settings
msg='drive:checking available speeds: %s'%(str(self.speeds))
self.queues['log'].put(msg)
if remote:
self.queues['tx_msg'].put(msg)
def shutdown(self):
self.gpio_zero()
#self.gpio_cleanup()
self.queues['log'].put('drive:stop.')
if __name__ == "__main__":
pwm_freq = 100
pin_power_left = 16
pin_power_right = 20
pin_direction_left_forward = 6
pin_direction_right_forward = 13
pin_direction_left_rear = 19
pin_direction_right_rear = 26
rpio.setmode(rpio.BCM)
## open pins for output :
rpio.setup(pin_power_left, rpio.OUT)
rpio.setup(pin_power_right, rpio.OUT)
    rpio.setup(pin_direction_left_forward, rpio.OUT)
    rpio.setup(pin_direction_right_forward, rpio.OUT)
    rpio.setup(pin_direction_left_rear, rpio.OUT)
    rpio.setup(pin_direction_right_rear, rpio.OUT)
## open pins for input :
# disabled for now
## setup software pwm
pwm_left = rpio.PWM(pin_power_left, pwm_freq)
pwm_right = rpio.PWM(pin_power_right, pwm_freq)
pwm_left.start(50)
pwm_right.start(50)
current_cycle_up = 50
current_cycle_down = 50
goon=True
periode=0.01
step=1
while goon:
try:
pwm_left.ChangeDutyCycle(current_cycle_up)
pwm_right.ChangeDutyCycle(current_cycle_down)
print current_cycle_up, current_cycle_down
current_cycle_up = abs((current_cycle_up + step)%100)
current_cycle_down = abs((current_cycle_down - step)%100)
time.sleep(periode)
except KeyboardInterrupt as e:
goon=False
rpio.cleanup()
| bcare/roverpi | roverserver/enginectrl.py | Python | gpl-3.0 | 14,624 | 0.019147 |
from easyprocess import EasyProcess
from entrypoint2 import entrypoint
from pyvirtualdisplay.display import Display
def extract_version(txt):
"""This function tries to extract the version from the help text"""
words = txt.replace(",", " ").split()
version = None
for x in reversed(words):
if len(x) > 2:
if x[0].lower() == "v":
x = x[1:]
if "." in x and x[0].isdigit():
version = x
break
return version
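# Illustration of extract_version() on a made-up help banner (the exact text
# printed by "eagle -?" differs between releases):
#
#     extract_version("EAGLE Version 7.7.0 Copyright (c) 1988-2016 CadSoft")
#     # -> '7.7.0'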
def version():
"""
return eagle version.
It does not work without X!
:rtype: string
"""
return extract_version(EasyProcess("eagle -?").call().stdout)
@entrypoint
def print_version():
with Display(visible=False):
print(version())
| ponty/eagexp | eagexp/version.py | Python | bsd-2-clause | 775 | 0 |
# strategy_best2.py
# Strategy pattern -- function-based implementation
# selecting best promotion from current module globals
"""
>>> joe = Customer('John Doe', 0)
>>> ann = Customer('Ann Smith', 1100)
>>> cart = [LineItem('banana', 4, .5),
... LineItem('apple', 10, 1.5),
... LineItem('watermellon', 5, 5.0)]
>>> Order(joe, cart, fidelity_promo)
<Order total: 42.00 due: 42.00>
>>> Order(ann, cart, fidelity_promo)
<Order total: 42.00 due: 39.90>
>>> banana_cart = [LineItem('banana', 30, .5),
... LineItem('apple', 10, 1.5)]
>>> Order(joe, banana_cart, bulk_item_promo)
<Order total: 30.00 due: 28.50>
>>> long_order = [LineItem(str(item_code), 1, 1.0)
... for item_code in range(10)]
>>> Order(joe, long_order, large_order_promo)
<Order total: 10.00 due: 9.30>
>>> Order(joe, cart, large_order_promo)
<Order total: 42.00 due: 42.00>
# BEGIN STRATEGY_BEST_TESTS
>>> Order(joe, long_order, best_promo)
<Order total: 10.00 due: 9.30>
>>> Order(joe, banana_cart, best_promo)
<Order total: 30.00 due: 28.50>
>>> Order(ann, cart, best_promo)
<Order total: 42.00 due: 39.90>
# END STRATEGY_BEST_TESTS
"""
from collections import namedtuple
Customer = namedtuple('Customer', 'name fidelity')
class LineItem:
def __init__(self, product, quantity, price):
self.product = product
self.quantity = quantity
self.price = price
def total(self):
return self.price * self.quantity
class Order: # the Context
def __init__(self, customer, cart, promotion=None):
self.customer = customer
self.cart = list(cart)
self.promotion = promotion
def total(self):
if not hasattr(self, '__total'):
self.__total = sum(item.total() for item in self.cart)
return self.__total
def due(self):
if self.promotion is None:
discount = 0
else:
discount = self.promotion(self)
return self.total() - discount
def __repr__(self):
fmt = '<Order total: {:.2f} due: {:.2f}>'
return fmt.format(self.total(), self.due())
def fidelity_promo(order):
"""5% discount for customers with 1000 or more fidelity points"""
return order.total() * .05 if order.customer.fidelity >= 1000 else 0
def bulk_item_promo(order):
"""10% discount for each LineItem with 20 or more units"""
discount = 0
for item in order.cart:
if item.quantity >= 20:
discount += item.total() * .1
return discount
def large_order_promo(order):
"""7% discount for orders with 10 or more distinct items"""
distinct_items = {item.product for item in order.cart}
if len(distinct_items) >= 10:
return order.total() * .07
return 0
# BEGIN STRATEGY_BEST2
promos = [globals()[name] for name in globals() # <1>
if name.endswith('_promo') # <2>
and name != 'best_promo'] # <3>
def best_promo(order):
"""Select best discount available
"""
return max(promo(order) for promo in promos) # <4>
# END STRATEGY_BEST2
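# Note: at import time the introspection above collects every *_promo function
# defined so far except best_promo, so promos == [fidelity_promo,
# bulk_item_promo, large_order_promo] (in definition order on Python 3.7+,
# where module globals preserve insertion order).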
| pythonprobr/oscon2014 | strategy/strategy_best2.py | Python | mit | 3,172 | 0.001261 |
"""Test model properties."""
import pytest
from gremlin_python.statics import long
from goblin import element, exception, manager, properties
def test_set_change_property(person, lives_in):
# vertex
assert not person.name
person.name = 'leif'
assert person.name == 'leif'
person.name = 'leifur'
assert person.name == 'leifur'
# edge
assert not lives_in.notes
lives_in.notes = 'notable'
assert lives_in.notes == 'notable'
lives_in.notes = 'more notable'
assert lives_in.notes == 'more notable'
def test_property_default(knows):
assert knows.notes == 'N/A'
knows.notes = 'notable'
assert knows.notes == 'notable'
def test_false_bool_default(place):
assert place.incorporated.value is False
def test_validation(person):
person.age = 10
with pytest.raises(Exception):
person.age = 'hello'
def test_setattr_validation(person):
setattr(person, 'age', 10)
assert person.age == 10
with pytest.raises(Exception):
setattr(person, 'age', 'hello')
def test_set_id_long(person):
person.id = 1
assert isinstance(person.id, long)
def test_id_class_attr_throws(person_class):
with pytest.raises(exception.ElementError):
person_class.id
# Vertex properties
def test_set_change_vertex_property(person):
assert not person.birthplace
person.birthplace = 'Iowa City'
assert isinstance(person.birthplace, element.VertexProperty)
assert person.birthplace.value == 'Iowa City'
person.birthplace = 'U of I Hospital'
assert person.birthplace.value == 'U of I Hospital'
def test_vertex_property_default():
"""Makes sure that a brand new VertexProperty (i.e., with no value set) is
still representable. Addresses issue #52.
"""
vp = element.VertexProperty(int)
assert repr(vp) == "<VertexProperty(type=0, value=None)"
def test_validate_vertex_prop(person):
assert not person.birthplace
person.birthplace = 1
assert person.birthplace.value == '1'
def test_set_change_list_card_vertex_property(person):
assert not person.nicknames
person.nicknames = 'sly'
assert isinstance(person.nicknames, list)
assert isinstance(person.nicknames, manager.ListVertexPropertyManager)
assert isinstance(person.nicknames[0], element.VertexProperty)
assert person.nicknames[0].value == 'sly'
assert person.nicknames('sly') == person.nicknames[0]
person.nicknames = set(['sly', 'guy'])
assert isinstance(person.nicknames, list)
assert person.nicknames('sly').value == 'sly'
assert person.nicknames('guy').value == 'guy'
person.nicknames = ('sly', 'big', 'guy')
assert isinstance(person.nicknames, list)
assert [v.value for v in person.nicknames] == ['sly', 'big', 'guy']
person.nicknames = ['sly', 'big', 'guy', 'guy']
assert isinstance(person.nicknames, list)
assert len(person.nicknames('guy')) == 2
assert [v.value for v in person.nicknames] == ['sly', 'big', 'guy', 'guy']
person.nicknames.append(1)
assert person.nicknames('1').value == '1'
def test_list_card_vertex_property_validation(person):
person.nicknames = [1, 1.5, 2]
assert [v.value for v in person.nicknames] == ['1', '1.5', '2']
def test_set_change_set_card_vertex_property(place):
assert not place.important_numbers
place.important_numbers = 1
assert isinstance(place.important_numbers, set)
assert isinstance(place.important_numbers,
manager.SetVertexPropertyManager)
number_one, = place.important_numbers
assert isinstance(number_one, element.VertexProperty)
assert number_one.value == 1
assert place.important_numbers(1) == number_one
place.important_numbers = [1, 2]
assert isinstance(place.important_numbers, set)
assert {v.value for v in place.important_numbers} == set([1, 2])
place.important_numbers.add(3)
assert {v.value for v in place.important_numbers} == set([1, 2, 3])
place.important_numbers = (1, 2, 3, 4)
assert isinstance(place.important_numbers, set)
assert {v.value for v in place.important_numbers} == set([1, 2, 3, 4])
place.important_numbers = set([1, 2, 3])
assert isinstance(place.important_numbers, set)
assert {v.value for v in place.important_numbers} == set([1, 2, 3])
with pytest.raises(exception.ValidationError):
place.important_numbers.add('dude')
def test_set_card_union(place):
place.important_numbers = set([1, 2, 3])
place.important_numbers = place.important_numbers.union({3, 4, 5})
def test_set_card_64bit_integer(place):
place.important_numbers = set([long(1), long(2), long(3)])
assert all(isinstance(i.value, long) for i in place.important_numbers)
def test_set_card_validation_vertex_property(place):
with pytest.raises(exception.ValidationError):
place.important_numbers = set(['hello', 2, 3])
def test_cant_set_vertex_prop_on_edge():
with pytest.raises(exception.MappingError):
class MyEdge(element.Edge):
vert_prop = element.VertexProperty(properties.String)
def test_meta_property_set_update(place):
assert not place.historical_name
place.historical_name = ['hispania', 'al-andalus']
place.historical_name('hispania').notes = 'roman rule'
assert place.historical_name('hispania').notes == 'roman rule'
place.historical_name('hispania').year = 300
assert place.historical_name('hispania').year == 300
place.historical_name('al-andalus').notes = 'muslim rule'
assert place.historical_name('al-andalus').notes == 'muslim rule'
place.historical_name('al-andalus').year = 700
assert place.historical_name('al-andalus').year == 700
def test_meta_property_validation(place):
assert not place.historical_name
place.historical_name = ['spain']
with pytest.raises(exception.ValidationError):
place.historical_name('spain').year = 'hello'
class TestString:
def test_validation(self, string):
assert string.validate(1) == '1'
def test_to_db(self, string):
assert string.to_db('hello') == 'hello'
def test_to_ogm(self, string):
assert string.to_ogm('hello') == 'hello'
def test_initval_to_db(self, string_class):
string = string_class('hello')
assert string.to_db() == 'hello'
class TestInteger:
def test_validation(self, integer):
assert integer.validate('1') == 1
with pytest.raises(Exception):
integer.validate('hello')
def test_to_db(self, integer):
assert integer.to_db(1) == 1
def test_to_ogm(self, integer):
        assert integer.to_ogm(1) == 1
def test_initval_to_db(self, integer_class):
integer = integer_class(1)
assert integer.to_db() == 1
class TestFloat:
def test_validation(self, flt):
assert flt.validate(1.2) == 1.2
with pytest.raises(Exception):
flt.validate('hello')
def test_to_db(self, flt):
assert flt.to_db(1.2) == 1.2
def test_to_ogm(self, flt):
        assert flt.to_ogm(1.2) == 1.2
def test_initval_to_db(self, flt_class):
flt = flt_class(1.2)
assert flt.to_db() == 1.2
class TestBoolean:
def test_validation_true(self, boolean):
assert boolean.validate(True)
def test_validation_false(self, boolean):
assert not boolean.validate(False)
def test_to_db_true(self, boolean):
assert boolean.to_db(True)
def test_to_db_false(self, boolean):
assert not boolean.to_db(False)
def test_to_ogm_true(self, boolean):
assert boolean.to_ogm(True)
def test_to_ogm_false(self, boolean):
assert not boolean.to_ogm(False)
def test_initval_to_db_true(self, boolean_class):
boolean = boolean_class(True)
assert boolean.to_db()
    def test_initval_to_db_false(self, boolean_class):
boolean = boolean_class(False)
assert not boolean.to_db()
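# The fixtures used above (person, place, person_class, string, integer, flt,
# boolean, ...) are provided by the suite's conftest.py. A rough sketch of the
# kind of element classes they build (the exact base classes and cardinality
# constants here are assumptions, not the project's definitions):
#
#   class Person(element.Vertex):
#       age = properties.Property(properties.Integer)
#       birthplace = element.VertexProperty(properties.String)
#       nicknames = element.VertexProperty(
#           properties.String, card=Cardinality.list_)
#
#   @pytest.fixture
#   def person():
#       return Person()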
| ZEROFAIL/goblin | tests/test_properties.py | Python | agpl-3.0 | 7,949 | 0 |
#!/usr/bin/env python
from os.path import exists
from setuptools import setup
import eqpy
setup(
name='eqpy',
version=eqpy.__version__,
description='Solve systems of equations and assumptions, linear and '
'non-linear, numerically and symbolically.',
url='http://github.com/eriknw/eqpy/',
author='https://raw.github.com/eriknw/eqpy/master/AUTHORS.md',
maintainer='Erik Welch',
maintainer_email='erik.n.welch@gmail.com',
license='BSD',
keywords='math CAS equations symbolic sympy',
packages=[
'eqpy',
],
classifiers=[
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Physics',
],
long_description=open('README.md').read() if exists("README.md") else "",
zip_safe=False,
)
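# A minimal sketch of installing the package from a source checkout (assuming
# a standard setuptools/pip environment):
#
#   pip install .        # regular install
#   pip install -e .     # editable install for development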
| eriknw/eqpy | setup.py | Python | bsd-3-clause | 1,281 | 0 |
import re
class HeadingsParser():
"""
    The HeadingsParser parses the document for headings.
    NOT YET: converts headings to raw LaTeX headings in the correct way, so that they can be referenced later.
    See https://www.sharelatex.com/learn/Sections_and_chapters for info about the heading levels."""
def __init__(self):
super().__init__()
self.title = None
self.subtitle = None
self.heading = []
# regexes
self.title_start_marker_regex = re.compile(r'[=]{3,}')
self.title_end_marker_regex = re.compile(r'[=]{3,}')
self.title_content_regex = re.compile(
r'''
^ # beginning of line
[ ] # one whitespace
[A-Za-z0-9äöüÄÖÜ]+ # alphanumerical string, no whitespace
(?P<title>[A-Za-z0-9äöüÄÖÜ ]+) # alphanumerical string, whitespace ok
[A-Za-z0-9äöüÄÖÜ]+ # alphanumerical string, no whitespace
[ ] # one whitespace
$ # end of line
''', re.VERBOSE|re.UNICODE
)
self.subtitle_start_marker_regex = re.compile(r'[-]{3,}')
self.subtitle_end_marker_regex = re.compile(r'[-]{3,}')
self.subtitle_content_regex = re.compile(
r'''
^ # beginning of line
[ ] # one whitespace
[A-Za-z0-9äöüÄÖÜ]+ # alphanumerical string, no whitespace
(?P<subtitle>[A-Za-z0-9äöüÄÖÜ ]+) # alphanumerical string, whitespace ok
[A-Za-z0-9äöüÄÖÜ]+ # alphanumerical string, no whitespace
[ ] # one whitespace
$ # end of line
''', re.VERBOSE|re.UNICODE
)
# Headings cannot begin with whitespace
self.h_content_regex = re.compile(
r'''
^ # beginning of line
[A-Za-z0-9äöüÄÖÜß(] # alphanum
[A-Za-z0-9äöüÄÖÜß,() -]* # alphanum or space
[A-Za-z0-9äöüÄÖÜß)] # alphanum
$ # end of line
''', re.VERBOSE|re.UNICODE
)
# chapter
self.h1_underlining_regex = re.compile(r'[=]{3,}')
# section
self.h2_underlining_regex = re.compile(r'[-]{3,}')
# subsection
self.h3_underlining_regex = re.compile(r'[~]{3,}')
# subsubsection
self.h4_underlining_regex = re.compile(r'[\^]{3,}')
# paragraph
self.h5_underlining_regex = re.compile(r'[*]{3,}')
# subparagraph
self.h6_underlining_regex = re.compile(r'[.]{3,}')
def parse(self, rst_file_content):
self.title = self.find_title(rst_file_content)
        self.subtitle = self.find_subtitle(rst_file_content)
return self.find_heading_labels(rst_file_content)
def find_title(self, rst_file_content):
print('looking for title ...')
title = None
for lineno, line in enumerate(rst_file_content):
previous_line = ""
if lineno > 0:
previous_line = rst_file_content[lineno - 1]
next_line = ""
if lineno < len(rst_file_content) - 1:
next_line = rst_file_content[lineno + 1]
# title
if (
self.title_start_marker_regex.match(previous_line) and
self.title_end_marker_regex.match(next_line) and
(
len(self.title_start_marker_regex.match(previous_line).group()) ==
len(self.title_end_marker_regex.match(next_line).group())
) and
self.title_content_regex.match(line) and
not title
):
title = self.title_content_regex.match(line).group('title')
print('title is:|', title, '|', sep='')
break
if not title: print('Could not find title in document.')
return title
def find_subtitle(self, rst_file_content):
print('looking for subtitle ...')
subtitle = None
for lineno, line in enumerate(rst_file_content):
previous_line = ""
if lineno > 0:
previous_line = rst_file_content[lineno - 1]
next_line = ""
if lineno < len(rst_file_content) - 1:
next_line = rst_file_content[lineno + 1]
if (
self.subtitle_start_marker_regex.match(previous_line) and
self.subtitle_end_marker_regex.match(next_line) and
(
len(self.subtitle_start_marker_regex.match(previous_line).group()) ==
len(self.subtitle_end_marker_regex.match(next_line).group())
) and
self.subtitle_content_regex.match(line) and
not subtitle
):
subtitle = self.subtitle_content_regex.match(line).group('subtitle')
print('subtitle is:|', subtitle, '|', sep='')
break
if not subtitle: print('Could not find subtitle in document.')
return subtitle
def find_heading_labels(self, rst_file_content):
print('looking for headings ...')
headings_dict = {}
# heading_labels = []
for lineno, line in enumerate(rst_file_content):
# print('current line:', lineno)
# print('current line:', line)
# if line.startswith("Schlussfolgerungen"):
# print('current line:', line)
previous_line = ""
if lineno > 0:
previous_line = rst_file_content[lineno - 1]
next_line = ""
if lineno < len(rst_file_content) - 1:
next_line = rst_file_content[lineno + 1]
# headings level 1
# print('looking for h1 ...')
if (
(previous_line.isspace() or previous_line == '') and
self.h_content_regex.match(line) and
self.h1_underlining_regex.match(next_line) and
len(self.h_content_regex.match(line).group()) == len(self.h1_underlining_regex.match(next_line).group())
):
print('found a h1:', line)
print('replacing chapter heading')
headings_dict[line] = self.heading_to_label(line, 'chapter')
# heading_labels.append(self.heading_to_label(line, 'chapter'))
rst_file_content[lineno] = ':raw-latex:`\chapter{' + line + '}`'
rst_file_content[lineno + 1] = ':raw-latex:`\label{' + self.heading_to_label(line, 'chapter') + '}`'
# headings level 2
# print('looking for h2 ...')
if (
(previous_line.isspace() or previous_line == '') and
self.h_content_regex.match(line) and
self.h2_underlining_regex.match(next_line) and
len(self.h_content_regex.match(line).group()) == len(self.h2_underlining_regex.match(next_line).group())
):
print('found a h2:', line)
headings_dict[line] = self.heading_to_label(line, 'section')
# heading_labels.append(self.heading_to_label(line, 'section'))
rst_file_content[lineno] = ':raw-latex:`\section{' + line + '}`'
rst_file_content[lineno + 1] = ':raw-latex:`\label{' + self.heading_to_label(line, 'section') + '}`'
# headings level 3
# print('looking for h3 ...')
if (
(previous_line.isspace() or previous_line == '') and
self.h_content_regex.match(line) and
self.h3_underlining_regex.match(next_line) and
len(self.h_content_regex.match(line).group()) == len(self.h3_underlining_regex.match(next_line).group())
):
print('found a h3:', line)
# heading_labels.append(self.heading_to_label(line, 'subsection'))
headings_dict[line] = self.heading_to_label(line, 'subsection')
rst_file_content[lineno] = ':raw-latex:`\subsection{' + line + '}`'
rst_file_content[lineno + 1] = ':raw-latex:`\label{' + self.heading_to_label(line, 'subsection') + '}`'
# headings level 4
# print('looking for h4 ...')
if (
(previous_line.isspace() or previous_line == '') and
self.h_content_regex.match(line) and
self.h4_underlining_regex.match(next_line) and
len(self.h_content_regex.match(line).group()) == len(self.h4_underlining_regex.match(next_line).group())
):
print('found a h4:', line)
# heading_labels.append(self.heading_to_label(line, 'subsubsection'))
headings_dict[line] = self.heading_to_label(line, 'subsubsection')
rst_file_content[lineno] = ':raw-latex:`\subsubsection{' + line + '}`'
rst_file_content[lineno + 1] = ':raw-latex:`\label{' + self.heading_to_label(line, 'subsubsection') + '}`'
# headings level 5
# print('looking for h5 ...')
if (
(previous_line.isspace() or previous_line == '') and
self.h_content_regex.match(line) and
self.h5_underlining_regex.match(next_line) and
len(self.h_content_regex.match(line).group()) == len(self.h5_underlining_regex.match(next_line).group())
):
print('found a h5:', line)
# heading_labels.append(self.heading_to_label(line, 'paragraph'))
headings_dict[line] = self.heading_to_label(line, 'paragraph')
rst_file_content[lineno] = ':raw-latex:`\paragraph{' + line + '}`'
rst_file_content[lineno + 1] = ':raw-latex:`\label{' + self.heading_to_label(line, 'paragraph') + '}`'
# headings level 6
# print('looking for h6 ...')
if (
(previous_line.isspace() or previous_line == '') and
self.h_content_regex.match(line) and
self.h6_underlining_regex.match(next_line) and
len(self.h_content_regex.match(line).group()) == len(self.h6_underlining_regex.match(next_line).group())
):
print('found a h6:', line)
# heading_labels.append(self.heading_to_label(line, 'subparagraph'))
headings_dict[line] = self.heading_to_label(line, 'subparagraph')
rst_file_content[lineno] = ':raw-latex:`\subparagraph{' + line + '}`'
rst_file_content[lineno + 1] = ':raw-latex:`\label{' + self.heading_to_label(line, 'subparagraph') + '}`'
return headings_dict
def heading_to_label(self, heading_text, level):
heading_text = heading_text.lower()
replaced_chars = {
' ': '-',
'(': '',
')': ''
}
for key,value in replaced_chars.items():
heading_text = heading_text.replace(key, value)
return '{0}:{1}'.format(level, heading_text)
# self.chapter_delimiter_regex = re.compile(r'={3,}') # =============
# self.section_delimiter_regex = re.compile(r'-{3,}') # -------------
# self.subsection_delimiter_regex = re.compile(r'~{3,}') # ~~~~~~~~~~~~~
# self.subsubsection_delimiter_regex = re.compile(r'\^{3,}') # ^^^^^^^^^^^^^
# self.heading_text_regex = re.compile(
# r'''
# ^
# \s*
# (?P<title_text>
# [a-zA-Z0-9]
# [a-zA-Z0-9_ -]*
# [a-zA-Z0-9]
# )
# \s*
# $''',
# re.VERBOSE)
# self.heading_keys = []
# def parse_headings(self, rst_file_content):
# for lineno, line in enumerate(rst_file_content):
#
# # search for title
# if self.title_delimiter_regex.search(line) is not None:
# if (lineno >= 2):
# if (
# self.title_delimiter_regex.search(rst_file_content[lineno - 2]) is not None and
# self.heading_text_regex.search(rst_file_content[lineno - 1]) is not None
# ):
# title_text = self.heading_text_regex.findall(rst_file_content[lineno - 1])[0].strip()
# self.heading_keys.append(re.sub('\s+', '-', title_text.lower()))
# print('[DEBUG:HEADINGS]', self.heading_keys)
# print('[DEBUG:HEADINGS] !!! found a title in the document:', title_text, sep='')
#
# # TODO: elif subtitle
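# A minimal usage sketch (the input file name is hypothetical; ``parse``
# expects the document as a list of lines and returns a mapping of heading
# text to LaTeX label):
if __name__ == '__main__':
    with open('document.rst', encoding='utf-8') as rst_file:
        content = rst_file.read().splitlines()
    parser = HeadingsParser()
    heading_labels = parser.parse(content)
    print('title:', parser.title)
    for heading, label in heading_labels.items():
        print(heading, '->', label)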
| ZelphirKaltstahl/rst-internal-links-to-raw-latex | RSTInternalLinks/HeadingsParser.py | Python | gpl-3.0 | 13,092 | 0.004987 |
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is Python XPCOM language bindings.
#
# The Initial Developer of the Original Code is
# ActiveState Tool Corp.
# Portions created by the Initial Developer are Copyright (C) 2000, 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Hammond <mhammond@skippinet.com.au> (original author)
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
# This is a demo of how to use the xpcom.server "tracer" facility.
#
# This demo installs a tracer that uses the Python profiler. It then
# creates the Python test component, and references some methods
# and properties. It then dumps the profile statistics.
# This same technique could also be used for debugging, for example.
import profile
p = profile.Profile()
getters = {}
setters = {}
# A wrapper around a function - looks like a function,
# but actually profiles the delegate.
class TracerDelegate:
def __init__(self, callme):
self.callme = callme
def __call__(self, *args):
return p.runcall(self.callme, *args)
# A wrapper around each of our XPCOM objects. All incoming PyXPCOM calls
# are made on this object, which creates a TracerDelegate around
# every function. As each function is called, it collects profile info.
class Tracer:
def __init__(self, ob):
self.__dict__['_ob'] = ob
def __repr__(self):
return "<Tracer around %r>" % (self._ob,)
def __str__(self):
return "<Tracer around %r>" % (self._ob,)
def __getattr__(self, attr):
ret = getattr(self._ob, attr) # Attribute error just goes up
if callable(ret):
return TracerDelegate(ret)
else:
if not attr.startswith("_com_") and not attr.startswith("_reg_"):
getters[attr] = getters.setdefault(attr,0) + 1
return ret
def __setattr__(self, attr, val):
if self.__dict__.has_key(attr):
self.__dict__[attr] = val
return
setters[attr] = setters.setdefault(attr,0) + 1
setattr(self._ob, attr, val)
# Installed as a global XPCOM function that if exists, will be called
# to wrap each XPCOM object created.
def MakeTracer(ob):
# In some cases we may be asked to wrap ourself, so handle that.
if isinstance(ob, Tracer):
return ob
return Tracer(ob)
def test():
import xpcom.server, xpcom.components
xpcom.server.tracer = MakeTracer
contractid = "Python.TestComponent"
for i in range(100):
c = xpcom.components.classes[contractid].createInstance().queryInterface(xpcom.components.interfaces.nsIPythonTestInterface)
c.boolean_value = 0
a = c.boolean_value
c.do_boolean(0,1)
    print "Finished"
p.print_stats()
print "%-30s%s" % ("Attribute Gets", "Number")
print "-" * 36
for name, num in getters.items():
print "%-30s%d" % (name, num)
print "%-30s%s" % ("Attribute Sets", "Number")
print "-" * 36
for name, num in setters.items():
print "%-30s%d" % (name, num)
test()
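# The same wrapping pattern works for any Python object, not just XPCOM
# components. A rough sketch (plain Python 2, no component registration):
#
#   class Sample:
#       def work(self, n):
#           return n * 2
#
#   traced = Tracer(Sample())
#   traced.answer = 42        # recorded in the `setters` dict
#   print traced.work(21)     # profiled through TracerDelegate, prints 42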
| yuyuyu101/VirtualBox-NetBSD | src/libs/xpcom18a4/python/tools/tracer_demo.py | Python | gpl-2.0 | 4,360 | 0.003899 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.resourcemanager_v3.services.projects import pagers
from google.cloud.resourcemanager_v3.types import projects
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import ProjectsTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import ProjectsGrpcAsyncIOTransport
from .client import ProjectsClient
class ProjectsAsyncClient:
"""Manages Google Cloud Projects."""
_client: ProjectsClient
DEFAULT_ENDPOINT = ProjectsClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = ProjectsClient.DEFAULT_MTLS_ENDPOINT
project_path = staticmethod(ProjectsClient.project_path)
parse_project_path = staticmethod(ProjectsClient.parse_project_path)
common_billing_account_path = staticmethod(
ProjectsClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
ProjectsClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(ProjectsClient.common_folder_path)
parse_common_folder_path = staticmethod(ProjectsClient.parse_common_folder_path)
common_organization_path = staticmethod(ProjectsClient.common_organization_path)
parse_common_organization_path = staticmethod(
ProjectsClient.parse_common_organization_path
)
common_project_path = staticmethod(ProjectsClient.common_project_path)
parse_common_project_path = staticmethod(ProjectsClient.parse_common_project_path)
common_location_path = staticmethod(ProjectsClient.common_location_path)
parse_common_location_path = staticmethod(ProjectsClient.parse_common_location_path)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ProjectsAsyncClient: The constructed client.
"""
return ProjectsClient.from_service_account_info.__func__(ProjectsAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ProjectsAsyncClient: The constructed client.
"""
return ProjectsClient.from_service_account_file.__func__(ProjectsAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
return ProjectsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
@property
def transport(self) -> ProjectsTransport:
"""Returns the transport used by the client instance.
Returns:
ProjectsTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(ProjectsClient).get_transport_class, type(ProjectsClient)
)
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, ProjectsTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the projects client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.ProjectsTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = ProjectsClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def get_project(
self,
request: Union[projects.GetProjectRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> projects.Project:
r"""Retrieves the project identified by the specified ``name`` (for
example, ``projects/415104041262``).
The caller must have ``resourcemanager.projects.get`` permission
for this project.
.. code-block:: python
from google.cloud import resourcemanager_v3
def sample_get_project():
# Create a client
client = resourcemanager_v3.ProjectsClient()
# Initialize request argument(s)
request = resourcemanager_v3.GetProjectRequest(
name="name_value",
)
# Make the request
response = client.get_project(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.resourcemanager_v3.types.GetProjectRequest, dict]):
The request object. The request sent to the
[GetProject][google.cloud.resourcemanager.v3.Projects.GetProject]
method.
name (:class:`str`):
Required. The name of the project (for example,
``projects/415104041262``).
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.resourcemanager_v3.types.Project:
A project is a high-level Google
Cloud entity. It is a container for
ACLs, APIs, App Engine Apps, VMs, and
other Google Cloud Platform resources.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = projects.GetProjectRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_project,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
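    # A rough async counterpart of the sample above (a sketch only; assumes
    # ambient Application Default Credentials and a hypothetical project name):
    #
    #   import asyncio
    #   from google.cloud import resourcemanager_v3
    #
    #   async def main():
    #       client = resourcemanager_v3.ProjectsAsyncClient()
    #       project = await client.get_project(name="projects/415104041262")
    #       print(project.display_name)
    #
    #   asyncio.run(main())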
async def list_projects(
self,
request: Union[projects.ListProjectsRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListProjectsAsyncPager:
r"""Lists projects that are direct children of the specified folder
or organization resource. ``list()`` provides a strongly
consistent view of the projects underneath the specified parent
resource. ``list()`` returns projects sorted based upon the
(ascending) lexical ordering of their ``display_name``. The
caller must have ``resourcemanager.projects.list`` permission on
the identified parent.
.. code-block:: python
from google.cloud import resourcemanager_v3
def sample_list_projects():
# Create a client
client = resourcemanager_v3.ProjectsClient()
# Initialize request argument(s)
request = resourcemanager_v3.ListProjectsRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_projects(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.resourcemanager_v3.types.ListProjectsRequest, dict]):
The request object. The request sent to the
[ListProjects][google.cloud.resourcemanager.v3.Projects.ListProjects]
method.
parent (:class:`str`):
Required. The name of the parent
resource to list projects under.
For example, setting this field to
'folders/1234' would list all projects
directly under that folder.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.resourcemanager_v3.services.projects.pagers.ListProjectsAsyncPager:
A page of the response received from the
[ListProjects][google.cloud.resourcemanager.v3.Projects.ListProjects]
method.
A paginated response where more pages are available
has next_page_token set. This token can be used in a
subsequent request to retrieve the next request page.
NOTE: A response may contain fewer elements than the
request page_size and still have a next_page_token.
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = projects.ListProjectsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_projects,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListProjectsAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
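    # A rough async sketch of paging through the results (the parent below is
    # hypothetical; the awaited call returns an async pager):
    #
    #   async def list_children():
    #       client = resourcemanager_v3.ProjectsAsyncClient()
    #       pager = await client.list_projects(parent="folders/1234")
    #       async for project in pager:
    #           print(project.project_id)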
async def search_projects(
self,
request: Union[projects.SearchProjectsRequest, dict] = None,
*,
query: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.SearchProjectsAsyncPager:
r"""Search for projects that the caller has both
``resourcemanager.projects.get`` permission on, and also satisfy
the specified query.
This method returns projects in an unspecified order.
This method is eventually consistent with project mutations;
this means that a newly created project may not appear in the
results or recent updates to an existing project may not be
reflected in the results. To retrieve the latest state of a
project, use the
[GetProject][google.cloud.resourcemanager.v3.Projects.GetProject]
method.
.. code-block:: python
from google.cloud import resourcemanager_v3
def sample_search_projects():
# Create a client
client = resourcemanager_v3.ProjectsClient()
# Initialize request argument(s)
request = resourcemanager_v3.SearchProjectsRequest(
)
# Make the request
page_result = client.search_projects(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.resourcemanager_v3.types.SearchProjectsRequest, dict]):
The request object. The request sent to the
[SearchProjects][google.cloud.resourcemanager.v3.Projects.SearchProjects]
method.
query (:class:`str`):
Optional. A query string for searching for projects that
the caller has ``resourcemanager.projects.get``
permission to. If multiple fields are included in the
                query, it will return results that match any of the
fields. Some eligible fields are:
::
| Field | Description |
|-------------------------|----------------------------------------------|
| displayName, name | Filters by displayName. |
| parent | Project's parent. (for example: folders/123,
organizations/*) Prefer parent field over parent.type and parent.id. |
| parent.type | Parent's type: `folder` or `organization`. |
| parent.id | Parent's id number (for example: 123) |
| id, projectId | Filters by projectId. |
| state, lifecycleState | Filters by state. |
| labels | Filters by label name or value. |
| labels.<key> (where *key* is the name of a label) | Filters by label
name. |
Search expressions are case insensitive.
Some examples queries:
::
| Query | Description |
|------------------|-----------------------------------------------------|
| name:how* | The project's name starts with "how". |
| name:Howl | The project's name is `Howl` or `howl`. |
| name:HOWL | Equivalent to above. |
| NAME:howl | Equivalent to above. |
| labels.color:* | The project has the label `color`. |
| labels.color:red | The project's label `color` has the value `red`. |
| labels.color:red labels.size:big | The project's label `color` has
the value `red` and its label `size` has the value `big`. |
If no query is specified, the call will return projects
for which the user has the
``resourcemanager.projects.get`` permission.
This corresponds to the ``query`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.resourcemanager_v3.services.projects.pagers.SearchProjectsAsyncPager:
A page of the response received from the
[SearchProjects][google.cloud.resourcemanager.v3.Projects.SearchProjects]
method.
A paginated response where more pages are available
has next_page_token set. This token can be used in a
subsequent request to retrieve the next request page.
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([query])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = projects.SearchProjectsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if query is not None:
request.query = query
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.search_projects,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.SearchProjectsAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
async def create_project(
self,
request: Union[projects.CreateProjectRequest, dict] = None,
*,
project: projects.Project = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Request that a new project be created. The result is an
``Operation`` which can be used to track the creation process.
This process usually takes a few seconds, but can sometimes take
much longer. The tracking ``Operation`` is automatically deleted
after a few hours, so there is no need to call
``DeleteOperation``.
.. code-block:: python
from google.cloud import resourcemanager_v3
def sample_create_project():
# Create a client
client = resourcemanager_v3.ProjectsClient()
# Initialize request argument(s)
request = resourcemanager_v3.CreateProjectRequest(
)
# Make the request
operation = client.create_project(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.resourcemanager_v3.types.CreateProjectRequest, dict]):
The request object. The request sent to the
[CreateProject][google.cloud.resourcemanager.v3.Projects.CreateProject]
method.
project (:class:`google.cloud.resourcemanager_v3.types.Project`):
Required. The Project to create.
Project ID is required. If the requested ID is
unavailable, the request fails.
If the ``parent`` field is set, the
``resourcemanager.projects.create`` permission is
checked on the parent resource. If no parent is set and
                the authorization credentials belong to an Organization,
the parent will be set to that Organization.
This corresponds to the ``project`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.resourcemanager_v3.types.Project` A project is a high-level Google Cloud entity. It is a
container for ACLs, APIs, App Engine Apps, VMs, and
other Google Cloud Platform resources.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([project])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = projects.CreateProjectRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if project is not None:
request.project = project
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_project,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
projects.Project,
metadata_type=projects.CreateProjectMetadata,
)
# Done; return the response.
return response
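    # A rough async sketch of the long-running create flow (project id and
    # parent are hypothetical; the returned operation must itself be awaited):
    #
    #   async def create():
    #       client = resourcemanager_v3.ProjectsAsyncClient()
    #       operation = await client.create_project(
    #           project=resourcemanager_v3.Project(
    #               project_id="my-new-project", parent="organizations/123"))
    #       project = await operation.result()
    #       print(project.name)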
async def update_project(
self,
request: Union[projects.UpdateProjectRequest, dict] = None,
*,
project: projects.Project = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Updates the ``display_name`` and labels of the project
identified by the specified ``name`` (for example,
``projects/415104041262``). Deleting all labels requires an
update mask for labels field.
The caller must have ``resourcemanager.projects.update``
permission for this project.
.. code-block:: python
from google.cloud import resourcemanager_v3
def sample_update_project():
# Create a client
client = resourcemanager_v3.ProjectsClient()
# Initialize request argument(s)
request = resourcemanager_v3.UpdateProjectRequest(
)
# Make the request
operation = client.update_project(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.resourcemanager_v3.types.UpdateProjectRequest, dict]):
The request object. The request sent to the
[UpdateProject][google.cloud.resourcemanager.v3.Projects.UpdateProject]
method.
Only the `display_name` and `labels` fields can be
                changed. Use the
[MoveProject][google.cloud.resourcemanager.v3.Projects.MoveProject]
method to change the `parent` field.
project (:class:`google.cloud.resourcemanager_v3.types.Project`):
Required. The new definition of the
project.
This corresponds to the ``project`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Optional. An update mask to
selectively update fields.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.resourcemanager_v3.types.Project` A project is a high-level Google Cloud entity. It is a
container for ACLs, APIs, App Engine Apps, VMs, and
other Google Cloud Platform resources.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([project, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = projects.UpdateProjectRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if project is not None:
request.project = project
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_project,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("project.name", request.project.name),)
),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
projects.Project,
metadata_type=projects.UpdateProjectMetadata,
)
# Done; return the response.
return response
async def move_project(
self,
request: Union[projects.MoveProjectRequest, dict] = None,
*,
name: str = None,
destination_parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Move a project to another place in your resource hierarchy,
under a new resource parent.
Returns an operation which can be used to track the process of
the project move workflow. Upon success, the
``Operation.response`` field will be populated with the moved
project.
The caller must have ``resourcemanager.projects.update``
permission on the project and have
``resourcemanager.projects.move`` permission on the project's
current and proposed new parent.
.. code-block:: python
from google.cloud import resourcemanager_v3
def sample_move_project():
# Create a client
client = resourcemanager_v3.ProjectsClient()
# Initialize request argument(s)
request = resourcemanager_v3.MoveProjectRequest(
name="name_value",
destination_parent="destination_parent_value",
)
# Make the request
operation = client.move_project(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.resourcemanager_v3.types.MoveProjectRequest, dict]):
The request object. The request sent to
[MoveProject][google.cloud.resourcemanager.v3.Projects.MoveProject]
method.
name (:class:`str`):
Required. The name of the project to
move.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
destination_parent (:class:`str`):
Required. The new parent to move the
Project under.
This corresponds to the ``destination_parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.resourcemanager_v3.types.Project` A project is a high-level Google Cloud entity. It is a
container for ACLs, APIs, App Engine Apps, VMs, and
other Google Cloud Platform resources.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name, destination_parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = projects.MoveProjectRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
if destination_parent is not None:
request.destination_parent = destination_parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.move_project,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
projects.Project,
metadata_type=projects.MoveProjectMetadata,
)
# Done; return the response.
return response
async def delete_project(
self,
request: Union[projects.DeleteProjectRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Marks the project identified by the specified ``name`` (for
example, ``projects/415104041262``) for deletion.
This method will only affect the project if it has a lifecycle
state of
[ACTIVE][google.cloud.resourcemanager.v3.Project.State.ACTIVE].
This method changes the Project's lifecycle state from
[ACTIVE][google.cloud.resourcemanager.v3.Project.State.ACTIVE]
to
[DELETE_REQUESTED][google.cloud.resourcemanager.v3.Project.State.DELETE_REQUESTED].
The deletion starts at an unspecified time, at which point the
Project is no longer accessible.
        Until the deletion completes, you can check the lifecycle state
        by retrieving the project with [GetProject]
[google.cloud.resourcemanager.v3.Projects.GetProject], and the
project remains visible to [ListProjects]
[google.cloud.resourcemanager.v3.Projects.ListProjects].
However, you cannot update the project.
After the deletion completes, the project is not retrievable by
the [GetProject]
[google.cloud.resourcemanager.v3.Projects.GetProject],
[ListProjects]
[google.cloud.resourcemanager.v3.Projects.ListProjects], and
[SearchProjects][google.cloud.resourcemanager.v3.Projects.SearchProjects]
methods.
This method behaves idempotently, such that deleting a
``DELETE_REQUESTED`` project will not cause an error, but also
won't do anything.
The caller must have ``resourcemanager.projects.delete``
permissions for this project.
.. code-block:: python
from google.cloud import resourcemanager_v3
def sample_delete_project():
# Create a client
client = resourcemanager_v3.ProjectsClient()
# Initialize request argument(s)
request = resourcemanager_v3.DeleteProjectRequest(
name="name_value",
)
# Make the request
operation = client.delete_project(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.resourcemanager_v3.types.DeleteProjectRequest, dict]):
                The request object. The request sent to the
                [DeleteProject][google.cloud.resourcemanager.v3.Projects.DeleteProject]
method.
name (:class:`str`):
Required. The name of the Project (for example,
``projects/415104041262``).
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.resourcemanager_v3.types.Project` A project is a high-level Google Cloud entity. It is a
container for ACLs, APIs, App Engine Apps, VMs, and
other Google Cloud Platform resources.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = projects.DeleteProjectRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_project,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
projects.Project,
metadata_type=projects.DeleteProjectMetadata,
)
# Done; return the response.
return response
async def undelete_project(
self,
request: Union[projects.UndeleteProjectRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Restores the project identified by the specified ``name`` (for
example, ``projects/415104041262``). You can only use this
method for a project that has a lifecycle state of
[DELETE_REQUESTED] [Projects.State.DELETE_REQUESTED]. After
deletion starts, the project cannot be restored.
The caller must have ``resourcemanager.projects.undelete``
permission for this project.
.. code-block:: python
from google.cloud import resourcemanager_v3
def sample_undelete_project():
# Create a client
client = resourcemanager_v3.ProjectsClient()
# Initialize request argument(s)
request = resourcemanager_v3.UndeleteProjectRequest(
name="name_value",
)
# Make the request
operation = client.undelete_project(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.resourcemanager_v3.types.UndeleteProjectRequest, dict]):
The request object. The request sent to the
[UndeleteProject]
[google.cloud.resourcemanager.v3.Projects.UndeleteProject]
method.
name (:class:`str`):
Required. The name of the project (for example,
``projects/415104041262``).
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.resourcemanager_v3.types.Project` A project is a high-level Google Cloud entity. It is a
container for ACLs, APIs, App Engine Apps, VMs, and
other Google Cloud Platform resources.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = projects.UndeleteProjectRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.undelete_project,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
projects.Project,
metadata_type=projects.UndeleteProjectMetadata,
)
# Done; return the response.
return response
async def get_iam_policy(
self,
request: Union[iam_policy_pb2.GetIamPolicyRequest, dict] = None,
*,
resource: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> policy_pb2.Policy:
r"""Returns the IAM access control policy for the
specified project. Permission is denied if the policy or
the resource do not exist.
.. code-block:: python
from google.cloud import resourcemanager_v3
def sample_get_iam_policy():
# Create a client
client = resourcemanager_v3.ProjectsClient()
# Initialize request argument(s)
request = resourcemanager_v3.GetIamPolicyRequest(
resource="resource_value",
)
# Make the request
                response = await client.get_iam_policy(request=request)
# Handle the response
print(response)
Args:
request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]):
The request object. Request message for `GetIamPolicy`
method.
resource (:class:`str`):
REQUIRED: The resource for which the
policy is being requested. See the
operation documentation for the
appropriate value for this field.
This corresponds to the ``resource`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.iam.v1.policy_pb2.Policy:
Defines an Identity and Access Management (IAM) policy. It is used to
specify access control policies for Cloud Platform
resources.
A Policy is a collection of bindings. A binding binds
one or more members to a single role. Members can be
user accounts, service accounts, Google groups, and
domains (such as G Suite). A role is a named list of
permissions (defined by IAM or configured by users).
A binding can optionally specify a condition, which
is a logic expression that further constrains the
role binding based on attributes about the request
and/or target resource.
**JSON Example**
                {
                  "bindings": [
                    {
                      "role": "roles/resourcemanager.organizationAdmin",
                      "members": [
                        "user:mike@example.com",
                        "group:admins@example.com",
                        "domain:google.com",
                        "serviceAccount:my-project-id@appspot.gserviceaccount.com"
                      ]
                    },
                    {
                      "role": "roles/resourcemanager.organizationViewer",
                      "members": ["user:eve@example.com"],
                      "condition": {
                        "title": "expirable access",
                        "description": "Does not grant access after Sep 2020",
                        "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
                      }
                    }
                  ]
                }
**YAML Example**
                bindings:
                - members:
                  - user:mike@example.com
                  - group:admins@example.com
                  - domain:google.com
                  - serviceAccount:my-project-id@appspot.gserviceaccount.com
                  role: roles/resourcemanager.organizationAdmin
                - members:
                  - user:eve@example.com
                  role: roles/resourcemanager.organizationViewer
                  condition:
                    title: expirable access
                    description: Does not grant access after Sep 2020
                    expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
For a description of IAM and its features, see the
[IAM developer's
guide](\ https://cloud.google.com/iam/docs).
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([resource])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = iam_policy_pb2.GetIamPolicyRequest(**request)
elif not request:
request = iam_policy_pb2.GetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_iam_policy,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def set_iam_policy(
self,
request: Union[iam_policy_pb2.SetIamPolicyRequest, dict] = None,
*,
resource: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> policy_pb2.Policy:
r"""Sets the IAM access control policy for the specified project.
CAUTION: This method will replace the existing policy, and
cannot be used to append additional IAM settings.
Note: Removing service accounts from policies or changing their
roles can render services completely inoperable. It is important
to understand how the service account is being used before
removing or updating its roles.
The following constraints apply when using ``setIamPolicy()``:
- Project does not support ``allUsers`` and
``allAuthenticatedUsers`` as ``members`` in a ``Binding`` of
a ``Policy``.
- The owner role can be granted to a ``user``,
``serviceAccount``, or a group that is part of an
organization. For example, group@myownpersonaldomain.com
could be added as an owner to a project in the
myownpersonaldomain.com organization, but not the
examplepetstore.com organization.
- Service accounts can be made owners of a project directly
without any restrictions. However, to be added as an owner, a
user must be invited using the Cloud Platform console and
must accept the invitation.
- A user cannot be granted the owner role using
``setIamPolicy()``. The user must be granted the owner role
using the Cloud Platform Console and must explicitly accept
the invitation.
- Invitations to grant the owner role cannot be sent using
``setIamPolicy()``; they must be sent only using the Cloud
Platform Console.
- Membership changes that leave the project without any owners
that have accepted the Terms of Service (ToS) will be
rejected.
- If the project is not part of an organization, there must be
at least one owner who has accepted the Terms of Service
(ToS) agreement in the policy. Calling ``setIamPolicy()`` to
remove the last ToS-accepted owner from the policy will fail.
This restriction also applies to legacy projects that no
longer have owners who have accepted the ToS. Edits to IAM
policies will be rejected until the lack of a ToS-accepting
owner is rectified.
- Calling this method requires enabling the App Engine Admin
API.
.. code-block:: python
from google.cloud import resourcemanager_v3
            async def sample_set_iam_policy():
# Create a client
                client = resourcemanager_v3.ProjectsAsyncClient()
# Initialize request argument(s)
request = resourcemanager_v3.SetIamPolicyRequest(
resource="resource_value",
)
# Make the request
                response = await client.set_iam_policy(request=request)
# Handle the response
print(response)
Args:
request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]):
The request object. Request message for `SetIamPolicy`
method.
resource (:class:`str`):
REQUIRED: The resource for which the
policy is being specified. See the
operation documentation for the
appropriate value for this field.
This corresponds to the ``resource`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.iam.v1.policy_pb2.Policy:
Defines an Identity and Access Management (IAM) policy. It is used to
specify access control policies for Cloud Platform
resources.
A Policy is a collection of bindings. A binding binds
one or more members to a single role. Members can be
user accounts, service accounts, Google groups, and
domains (such as G Suite). A role is a named list of
permissions (defined by IAM or configured by users).
A binding can optionally specify a condition, which
is a logic expression that further constrains the
role binding based on attributes about the request
and/or target resource.
**JSON Example**
                {
                  "bindings": [
                    {
                      "role": "roles/resourcemanager.organizationAdmin",
                      "members": [
                        "user:mike@example.com",
                        "group:admins@example.com",
                        "domain:google.com",
                        "serviceAccount:my-project-id@appspot.gserviceaccount.com"
                      ]
                    },
                    {
                      "role": "roles/resourcemanager.organizationViewer",
                      "members": ["user:eve@example.com"],
                      "condition": {
                        "title": "expirable access",
                        "description": "Does not grant access after Sep 2020",
                        "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
                      }
                    }
                  ]
                }
**YAML Example**
                bindings:
                - members:
                  - user:mike@example.com
                  - group:admins@example.com
                  - domain:google.com
                  - serviceAccount:my-project-id@appspot.gserviceaccount.com
                  role: roles/resourcemanager.organizationAdmin
                - members:
                  - user:eve@example.com
                  role: roles/resourcemanager.organizationViewer
                  condition:
                    title: expirable access
                    description: Does not grant access after Sep 2020
                    expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
For a description of IAM and its features, see the
[IAM developer's
guide](\ https://cloud.google.com/iam/docs).
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([resource])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = iam_policy_pb2.SetIamPolicyRequest(**request)
elif not request:
request = iam_policy_pb2.SetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.set_iam_policy,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def test_iam_permissions(
self,
request: Union[iam_policy_pb2.TestIamPermissionsRequest, dict] = None,
*,
resource: str = None,
permissions: Sequence[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> iam_policy_pb2.TestIamPermissionsResponse:
r"""Returns permissions that a caller has on the
specified project.
.. code-block:: python
from google.cloud import resourcemanager_v3
            async def sample_test_iam_permissions():
# Create a client
                client = resourcemanager_v3.ProjectsAsyncClient()
# Initialize request argument(s)
request = resourcemanager_v3.TestIamPermissionsRequest(
resource="resource_value",
permissions=['permissions_value_1', 'permissions_value_2'],
)
# Make the request
                response = await client.test_iam_permissions(request=request)
# Handle the response
print(response)
Args:
request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]):
The request object. Request message for
`TestIamPermissions` method.
resource (:class:`str`):
REQUIRED: The resource for which the
policy detail is being requested. See
the operation documentation for the
appropriate value for this field.
This corresponds to the ``resource`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
permissions (:class:`Sequence[str]`):
The set of permissions to check for the ``resource``.
Permissions with wildcards (such as '*' or 'storage.*')
are not allowed. For more information see `IAM
Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
This corresponds to the ``permissions`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
Response message for TestIamPermissions method.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([resource, permissions])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = iam_policy_pb2.TestIamPermissionsRequest(**request)
elif not request:
request = iam_policy_pb2.TestIamPermissionsRequest(
resource=resource, permissions=permissions,
)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.test_iam_permissions,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc, tb):
await self.transport.close()
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-resourcemanager",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
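# Editor-added usage sketch (illustrative only; not part of the generated
# client). The async client is driven from an event loop, e.g. via asyncio;
# the project name below is a placeholder value.
#
#   import asyncio
#   from google.cloud import resourcemanager_v3
#
#   async def main():
#       client = resourcemanager_v3.ProjectsAsyncClient()
#       policy = await client.get_iam_policy(resource="projects/415104041262")
#       print(policy)
#
#   asyncio.run(main())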
__all__ = ("ProjectsAsyncClient",)
| googleapis/python-resource-manager | google/cloud/resourcemanager_v3/services/projects/async_client.py | Python | apache-2.0 | 70,063 | 0.001313 |
from mock import patch
from .test_helper import raises
from kiwi.exceptions import KiwiPrivilegesError
from kiwi.privileges import Privileges
class TestPrivileges(object):
@raises(KiwiPrivilegesError)
@patch('os.geteuid')
    def test_check_for_root_permission_false(self, mock_euid):
mock_euid.return_value = 1
Privileges.check_for_root_permissions()
@patch('os.geteuid')
    def test_check_for_root_permission_true(self, mock_euid):
mock_euid.return_value = 0
assert Privileges.check_for_root_permissions() is True
| adrianschroeter/kiwi | test/unit/privileges_test.py | Python | gpl-3.0 | 568 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('predict', '0002_auto_20160524_0947'),
]
operations = [
migrations.RemoveField(
model_name='predictdataset',
name='dropbox_url',
),
migrations.AlterField(
model_name='predictdataset',
name='file_type',
field=models.CharField(max_length=25, choices=[(b'vcf', b'Variant Call Format (VCF)'), (b'fastq', b'FastQ Nucleotide Sequence'), (b'manual', b'Mutations Manual Entry')]),
),
migrations.AlterField(
model_name='predictdataset',
name='title',
field=models.CharField(max_length=255, verbose_name=b'Dataset Title'),
),
]
| IQSS/gentb-site | apps/predict/migrations/0003_auto_20160525_1521.py | Python | agpl-3.0 | 853 | 0.002345 |
###############################################################################
# ilastik: interactive learning and segmentation toolkit
#
# Copyright (C) 2011-2014, the ilastik developers
# <team@ilastik.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# In addition, as a special exception, the copyright holders of
# ilastik give you permission to combine ilastik with applets,
# workflows and plugins which are not covered under the GNU
# General Public License.
#
# See the LICENSE file for details. License information is also available
# on the ilastik web site at:
# http://ilastik.org/license.html
###############################################################################
import os
import h5py
import traceback
import threading
import logging
from lazyflow.graph import Operator, InputSlot, OutputSlot, OrderedSignal
from lazyflow.operators import OpBlockedArrayCache
from lazyflow.operators.ioOperators import OpH5WriterBigDataset
from lazyflow.utility.pathHelpers import PathComponents
from lazyflow.rtype import SubRegion
logger = logging.getLogger(__name__)
class ExportFormat():
H5 = 0
Npy = 1
Tiff = 2 # 3d only, up to 3 channels
def __init__(self, name, extension):
self.name = name
self.extension = extension
SupportedFormats = { ExportFormat.H5 : ExportFormat("Hdf5", '.h5') }
#SupportedFormats = { ExportFormat.H5 : ExportFormat("Hdf5", '.h5'),
# ExportFormat.Npy : ExportFormat("Numpy", '.npy'),
# ExportFormat.Tiff : ExportFormat("Tiff", '.tiff') }
class OpBatchIoSelective(Operator):
"""
The top-level operator for the Batch IO applet.
"""
name = "OpBatchIo"
category = "Top-level"
ExportDirectory = InputSlot(stype='filestring') # A separate directory to export to. If '', then exports to the input data's directory
Format = InputSlot(stype='int') # The export format
Suffix = InputSlot(stype='string') # Appended to the file name (before the extension)
InternalPath = InputSlot(stype='string', optional=True) # Hdf5 internal path
DatasetPath = InputSlot(stype='string') # The path to the original the dataset we're saving
ImageToExport = InputSlot() # The image that needs to be saved
SelectedSlices = InputSlot(stype='list')
OutputFileNameBase = InputSlot(stype='string', optional=True) # Override for the file name base. (Input filename is used by default.)
Dirty = OutputSlot(stype='bool') # Whether or not the result currently matches what's on disk
OutputDataPath = OutputSlot(stype='string')
ExportResult = OutputSlot(stype='string') # When requested, attempts to store the data to disk. Returns the path that the data was saved to.
ProgressSignal = OutputSlot(stype='object')
def __init__(self, *args, **kwargs):
super(OpBatchIoSelective, self).__init__(*args, **kwargs)
self.Dirty.meta.shape = (1,)
self.Dirty.meta.dtype = bool
self.OutputDataPath.meta.shape = (1,)
self.OutputDataPath.meta.dtype = object
self.ExportResult.meta.shape = (1,)
self.ExportResult.meta.dtype = object
# Provide default values
self.ExportDirectory.setValue( '' )
self.Format.setValue( ExportFormat.H5 )
self.Suffix.setValue( '_results' )
self.Dirty.setValue(True)
self.progressSignal = OrderedSignal()
self.ProgressSignal.setValue( self.progressSignal )
self._createDirLock = threading.Lock()
#make a cache of the input image not to request too much
self.ImageCache = OpBlockedArrayCache(parent=self)
self.ImageCache.fixAtCurrent.setValue(False)
self.ImageCache.Input.connect(self.ImageToExport)
def setupOutputs(self):
# Create the output data path
formatId = self.Format.value
ext = SupportedFormats[formatId].extension
inputPathComponents = PathComponents(self.DatasetPath.value)
# If no export directory was given, use the original input data's directory
if self.ExportDirectory.value == '':
outputPath = inputPathComponents.externalDirectory
else:
outputPath = self.ExportDirectory.value
if self.OutputFileNameBase.ready():
filenameBase = PathComponents(self.OutputFileNameBase.value).filenameBase
else:
filenameBase = inputPathComponents.filenameBase
outputPath = os.path.join(outputPath, filenameBase + self.Suffix.value + ext).replace('\\', '/')
# Set up the path for H5 export
if formatId == ExportFormat.H5:
if self.InternalPath.ready() and self.InternalPath.value != '':
# User-specified internal path
self._internalPath = self.InternalPath.value
if self._internalPath[0] != '/':
self._internalPath = "/" + self._internalPath
elif inputPathComponents.internalPath is not None:
# Mirror the input data internal path
self._internalPath = inputPathComponents.internalPath
else:
self._internalPath = '/volume/data'
self.OutputDataPath.setValue( outputPath + self._internalPath )
elif formatId == ExportFormat.Npy:
self.OutputDataPath.setValue( outputPath )
elif formatId == ExportFormat.Tiff:
self.OutputDataPath.setValue( outputPath )
self.setupCaches()
def setupCaches(self):
# Set the blockshapes for each input image separately, depending on which axistags it has.
axisOrder = [ tag.key for tag in self.ImageToExport.meta.axistags ]
## Pixel Cache blocks
blockDimsX = { 't' : (1,1),
'z' : (128,256),
'y' : (128,256),
'x' : (1,1),
'c' : (100, 100) }
blockDimsY = { 't' : (1,1),
'z' : (128,256),
'y' : (1,1),
'x' : (128,256),
'c' : (100,100) }
blockDimsZ = { 't' : (1,1),
'z' : (1,1),
'y' : (128,256),
'x' : (128,256),
'c' : (100,100) }
innerBlockShapeX = tuple( blockDimsX[k][0] for k in axisOrder )
outerBlockShapeX = tuple( blockDimsX[k][1] for k in axisOrder )
innerBlockShapeY = tuple( blockDimsY[k][0] for k in axisOrder )
outerBlockShapeY = tuple( blockDimsY[k][1] for k in axisOrder )
innerBlockShapeZ = tuple( blockDimsZ[k][0] for k in axisOrder )
outerBlockShapeZ = tuple( blockDimsZ[k][1] for k in axisOrder )
self.ImageCache.inputs["innerBlockShape"].setValue( innerBlockShapeZ )
self.ImageCache.inputs["outerBlockShape"].setValue( outerBlockShapeZ )
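        # Editor-added worked example: for an image whose axis order is
        # ('t', 'x', 'y', 'z', 'c'), the z-slice oriented dictionaries above
        # evaluate to innerBlockShapeZ = (1, 128, 128, 1, 100) and
        # outerBlockShapeZ = (1, 256, 256, 1, 100), i.e. the cache holds whole
        # xy-planes, one z-slice and one time point at a time.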
def propagateDirty(self, slot, subindex, roi):
# Out input data changed, so we have work to do when we get executed.
self.Dirty.setValue(True)
def execute(self, slot, subindex, roi, result):
if slot == self.Dirty:
assert False # Shouldn't get to this line because the dirty output is given a value directly
if slot == self.OutputDataPath:
assert False # This slot is already set via setupOutputs
if slot == self.ExportResult:
# We can stop now if the output isn't dirty
if not self.Dirty.value:
result[0] = True
return
exportFormat = self.Format.value
# Export H5
if exportFormat == ExportFormat.H5:
pathComp = PathComponents(self.OutputDataPath.value)
# Ensure the directory exists
if not os.path.exists(pathComp.externalDirectory):
with self._createDirLock:
# Check again now that we have the lock.
if not os.path.exists(pathComp.externalDirectory):
os.makedirs(pathComp.externalDirectory)
# Open the file
try:
hdf5File = h5py.File(pathComp.externalPath)
except:
logger.error("Unable to open hdf5File: " + pathComp.externalPath)
logger.error( traceback.format_exc() )
result[0] = False
return
# Set up the write operator
opH5Writer = OpH5WriterBigDataset(parent=self)
opH5Writer.hdf5File.setValue( hdf5File )
opH5Writer.hdf5Path.setValue( pathComp.internalPath )
#opH5Writer.Image.connect( self.ImageToExport )
opH5Writer.Image.connect(self.ImageCache.Output)
print "computing predictions for the selected slices:"
self.ImageCache.fixAtCurrent.setValue(False)
#check readiness
for inp in self.ImageCache.inputs:
print inp, self.ImageCache.inputs[inp].ready()
print "input shape:", self.ImageCache.Input.meta.shape
print "output shape:", self.ImageCache.Output.meta.shape
selectedSlices = self.SelectedSlices.value
zaxis = self.ImageToExport.meta.axistags.index('z')
for isl, sl in enumerate(selectedSlices):
print "computing for slice ...", isl
start = [0]*len(self.ImageToExport.meta.shape)
start[zaxis]=sl
stop = list(self.ImageToExport.meta.shape)
stop[zaxis]=sl+1
roi = SubRegion(self.ImageCache, start=start, stop=stop)
print roi
temp = self.ImageCache.Output[roi.toSlice()].wait()
#print temp
self.ImageCache.fixAtCurrent.setValue(True)
#tstart = [0]*len(self.ImageToExport.meta.shape)
#tstop = list(self.ImageToExport.meta.shape)
#troi = SubRegion(self.ImageCache, start=tstart, stop=tstop)
#tttemp = self.ImageCache.Output[troi.toSlice()].wait()
#print tttemp
# The H5 Writer provides it's own progress signal, so just connect ours to it.
opH5Writer.progressSignal.subscribe( self.progressSignal )
# Trigger the write
self.Dirty.setValue( not opH5Writer.WriteImage.value )
hdf5File.close()
opH5Writer.cleanUp()
# elif exportFormat == ExportFormat.Npy:
# assert False # TODO
# elif exportFormat == ExportFormat.Npy:
# assert False # TODO
else:
assert False, "Unknown export format"
result[0] = not self.Dirty.value
| nielsbuwen/ilastik | ilastik/applets/autocontextClassification/opBatchIoSelective.py | Python | gpl-3.0 | 11,464 | 0.012648 |
"""
This file is part of py-sonic.
py-sonic is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
py-sonic is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with py-sonic. If not, see <http://www.gnu.org/licenses/>
"""
class SonicError(Exception):
pass
class ParameterError(SonicError):
pass
class VersionError(SonicError):
pass
class CredentialError(SonicError):
pass
class AuthError(SonicError):
pass
class LicenseError(SonicError):
pass
class DataNotFoundError(SonicError):
pass
class ArgumentError(SonicError):
pass
# This maps the error code numbers from the Subsonic server to their
# appropriate Exceptions
ERR_CODE_MAP = {
0: SonicError ,
10: ParameterError ,
20: VersionError ,
30: VersionError ,
40: CredentialError ,
50: AuthError ,
60: LicenseError ,
70: DataNotFoundError ,
}
def getExcByCode(code):
code = int(code)
if code in ERR_CODE_MAP:
return ERR_CODE_MAP[code]
return SonicError
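# Editor-added usage sketch (illustrative only): given the numeric error code
# parsed from a Subsonic "failed" response, look up and raise the matching
# exception. The code/message values below are placeholders.
if __name__ == '__main__':
    code, message = 40, 'Wrong username or password'
    exc_class = getExcByCode(code)  # -> CredentialError
    try:
        raise exc_class(message)
    except SonicError as err:
        print('caught %s: %s' % (err.__class__.__name__, err))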
| ties/py-sonic | libsonic/errors.py | Python | gpl-3.0 | 1,413 | 0.012739 |
import time
import numpy as np
import cPickle as pk
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from matplotlib.widgets import Slider
import mpl_toolkits.mplot3d.axes3d as p3
from matplotlib.widgets import Button
class PlotAnimation:
"""
    Takes a list of PySnap objects and launches an interactive animation.
"""
def __init__(self, Anim, DataControl, **kwargs):
self.data = DataControl
self.previous_n = 0
self.Anim= Anim
def timer_update(self):
if self.Anim.n != self.previous_n:
self.data.Update(self.Anim.n)
self.previous_n = self.Anim.n
def launch(self):
self.data.Initialize()
self.timer=self.data.fig.canvas.new_timer(interval=self.Anim.delay)
args=[]
# We tell the timer to call the update function every 100ms
self.timer.add_callback(self.timer_update,*args)
self.timer.start()
if __name__ == "__main__":
from quick import *
R = ReadRun("/home/dorval/work/amuse/clump_finding/p10k_fragmentation/")
R.Animation()
A = R.Anim
class Data():
def __init__(self,):
self.X = [ np.random.random(20) for i in range(len(R))]
self.Y = [ np.random.random(20) for i in range(len(R))]
def Initialize(self):
X, Y = self.X[0], self.Y[0]
self.fig = plt.figure()
self.ax = self.fig.add_subplot(111)
self.line, = plt.plot(X, Y, "b")
self.canvas=self.ax.figure.canvas
        def Update(self, n):
X, Y = self.X[n], self.Y[n]
self.line.set_data(X, Y)
self.canvas.draw()
D= Data()
P = PlotAnimation(A,D)
P.launch()
plt.show()
| dorvaljulien/StarFiddle | anim_plot.py | Python | mit | 1,736 | 0.009793 |
# -----------------------------------------------------------
# reads the text from the given file, and outputs its
# character statistics
#
# (C) 2015 Frank Hofmann, Berlin, Germany
# Released under GNU Public License (GPL)
# email frank.hofmann@efho.de
# -----------------------------------------------------------
# call the program this way:
# python character-statistics.py inputfile.txt > statistics.csv
# import required python standard modules
import sys,csv
import codecs
import os
import chardet
# define character count function
def charStat (text):
# set default value
stat = {}
# go through the characters one by one
for character in text:
#print (character)
# retrieve current value for a character,
# and 0 if still not in list
# update the list
stat[character] = stat.get(character,0) + 1
# return statistics array
return stat
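# Editor-added worked example: charStat("abba") returns {'a': 2, 'b': 2} --
# each key is a character of the input, each value the number of occurrences.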
# count number of program parameters
numPara = len(sys.argv)
if numPara < 2:
print ("invalid number of parameters: 1 filename required.")
    print ("call for output on-screen: python %s inputfile.txt" % sys.argv[0])
    print ("call for file output: python %s inputfile.txt > statistics.csv" % sys.argv[0])
print ("Exiting.")
sys.exit(2)
# read name of the datafile
textfileName = sys.argv[1]
# print ("reading text from", textfileName, "...")
bytes = min(32, os.path.getsize(textfileName))
raw = open(textfileName, 'rb').read(bytes)
if raw.startswith(codecs.BOM_UTF8):
encoding = 'utf-8-sig'
else:
result = chardet.detect(raw)
encoding = result['encoding']
# open file for reading
fileHandle = open(textfileName, "r", encoding=encoding)
# read content
data = fileHandle.read()
# close file
fileHandle.close()
# calculate the character statisitics
statistics = charStat(data)
# retrieve the single items
items = statistics.items()
# print ("sorting by character ...")
# sort the items
sortedItems = sorted(items)
lines = []
# output sorted list as CSV data
for singleItem in sortedItems:
    lines.append("%s,%i\n" % (singleItem[0], singleItem[1]))
#print ("%s,%i" % (singleItem[0], singleItem[1]))
# open file for writing
fileHandle = open("s.txt", "w", encoding=encoding)
# write content
fileHandle.writelines(lines)
# close file
fileHandle.close()
| hofmannedv/training-python | text-analysis/character-statistics.py | Python | gpl-2.0 | 2,207 | 0.017218 |
import numpy as np
import scipy
import re
import os
import hashlib
import csb
from csb.bio.io.wwpdb import StructureParser
def chunks(l, n):
""" Yield successive n-sized chunks from l.
"""
for i in xrange(0, len(l), n):
yield l[i:i+n]
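# Editor-added example: list(chunks([1, 2, 3, 4, 5], 2)) -> [[1, 2], [3, 4], [5]]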
class ScatteringFactor(object):
"""
Cacluates the density in reciprocal space as
F(s) = sum_m f_m(s) exp(-B_m s**2 / 4) exp(i*2pi*s*r)
where f_m(s) is approximated by four Gaussian distributions
and exp(-B_m s**2 / 4) are the thermal fluctuations
g_m(s) = f_m(s) * exp(-B_m s**2 / 4) are precomputed
"""
def __init__(self, structure=None):
if structure is None:
self._atoms = list()
self._bfactor = list()
self._seq = list()
self._elements = list()
else:
self._structure = structure
# For now only non hydrogen atoms
# TODO use hydrogens as well
self._atoms = []
for chain in structure:
for residue in structure[chain]:
for atom in residue:
a = residue[atom]
if not a.name.startswith("H"):
self._atoms.append(residue[atom])
self._seq = []
self._bfactor = []
self._elements = []
for atom in self._atoms:
self._seq.append(atom.element.name)
self._elements.append(atom.element.name)
if atom._bfactor is None:
self._bfactor.append(1.)
else:
self._bfactor.append(atom._bfactor)
self._seq = np.array(self._seq)
self._elements = set(self._elements)
self._bfactor = np.clip(self._bfactor, 1., 100.)
self._atom_type_params = {}
self._read_sf(fn=os.path.expanduser("~/projects/xfel/py/xfel/core/atomsf.lib"))
@classmethod
def from_isd(cls, universe):
obj = cls()
atoms = universe.atoms
for atom in atoms:
element = str(atom.properties['element'].name)
obj._elements.append(element)
obj._atoms.append(atom)
obj._seq.append(element)
try:
obj._bfactor.append(max(1.,atom.properties['bfactor']))
except KeyError:
obj._bfactor.append(1.)
obj._seq = np.array(obj._seq)
obj._bfactor = np.array(obj._bfactor)
obj._elements = set(obj._elements)
obj._bfactor = np.clip(obj._bfactor, 1., 100.)
return obj
def _read_sf(self, fn):
"""
Reads the coefficients for the analystical approximation
to scattering factors from ccp4 database
"""
float_pattern = '[-+]?[0-9]*\.?[0-9]+(?:[eE][-+]?[0-9]+)?'
atom_pattern = '[A-Za-z]'
atom_pattern = '[A-Za-z0-9-+]+'
line_pattern = ("({0})\s+({1})"
"\s+({1})\s+({1})"
"\s+({1})\s+({1})"
"\s+({1})\s+({1})"
"\s+({1})\s+({1})").format(atom_pattern,float_pattern)
regex = re.compile(line_pattern)
with open(fn) as file_handle:
for line in file_handle:
if line.startswith("#"):
continue
m = regex.match(line)
atom_name = m.groups()[0]
a1, a2, a3, a4 = m.groups()[1], m.groups()[3], m.groups()[5], m.groups()[7]
b1, b2, b3, b4 = m.groups()[2], m.groups()[4], m.groups()[6], m.groups()[8]
c = m.groups()[9]
a = np.array([a1,a2,a3,a4],np.double)
b = np.array([b1,b2,b3,b4],np.double)
self._atom_type_params[atom_name] = (a,b,float(c))
def _calculate_gm(self, hkl):
"""
calculates the the product of scattering factor and
debye-waller factors
"""
f = np.zeros((len(self._atoms), hkl.shape[0]))
seq = self._seq
bfactor = self._bfactor
s_tols = 0.25 * (hkl**2).sum(-1)
for atom_type in self._elements:
a,b,c = self._atom_type_params[atom_type]
indices = np.where(seq==atom_type)[0]
fx = c + np.dot(np.exp(np.outer(-s_tols,b)),a)
f[indices,:] = fx[:]
f *= np.exp(np.outer(-bfactor,s_tols))
return f
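    # Editor-added note (illustrative): for an atom with Gaussian coefficients
    # (a, b, c) and B-factor B, each entry of the array returned above follows
    #
    #     g(s) = [ c + sum_i a_i * exp(-b_i * |s|^2 / 4) ] * exp(-B * |s|^2 / 4)
    #
    # i.e. the four-Gaussian scattering-factor approximation damped by the
    # Debye-Waller term, with s_tols = |s|^2 / 4 precomputed per reflection.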
def _calculate_gm_grad(self, hkl):
"""
calculate the gradien of the scattering factor and
debye-waller factor
"""
seq = np.array([a.element.name for a in self._atoms])
f = np.zeros((len(self._atoms), hkl.shape[0]))
dfg = np.zeros((len(self._atoms), hkl.shape[0], 3))
bfactors = np.array([a.bfactor for a in self._atoms])
bfactors = np.clip(bfactors, 1., 100.)
s_tols = 0.25 * (hkl**2).sum(-1)
for atom_type in self._elements:
a,b,c = self._atom_type_params[atom_type]
indices = np.where(seq==atom_type)[0]
bfactor = bfactors[indices]
g = np.exp(np.outer(-s_tols,b))
sf = np.dot(g, a) + c
gsf = np.sum(g * a[np.newaxis,:] * b[np.newaxis,:] * -0.5, -1)
dwf = np.exp(-np.outer(bfactor, s_tols))
gdwf = dwf * (bfactor * - 0.5)[:,np.newaxis]
grad = sf * gdwf + gsf * dwf
f[indices,:] = dwf * sf
dfg[indices,:,:] = grad[:,:,np.newaxis] * hkl
return dfg, f
def _calculate_scattering_factors(self, hkl):
"""
creates an approximation of the density in reciprocal space by
four gaussians
returns the scattering vectors
"""
seq = self._seq
bfactor = self._bfactor
f = np.zeros((len(self._atoms), hkl.shape[0]))
s_tols = 0.25 * (hkl**2).sum(-1)
for atom_type in self._elements:
a,b,c = self._atom_type_params[atom_type]
indices = np.where(seq==atom_type)[0]
fx = c + np.dot(np.exp(np.outer(-s_tols,b)),a)
f[indices,:] = fx[:]
return f
def _calculate_debyewaller_factors(self, hkl):
"""
"""
b = np.array(self._bfactor)
s_tols = 0.25 * (hkl**2).sum(-1)
t = np.exp(np.outer(-b,s_tols))
return t
def grad_s(self, X, hkl):
"""
Gradient with respect to the reciprocal space coordinates
@param X: atomic positions
@param hkl: reciprocal space positions
"""
seq = np.array([atom.element.name for atom in self._atoms])
bfactor = np.array([atom.bfactor for atom in self._atoms])
bfactor = np.clip(bfactor, 1., 100.)
s_tols = 0.25 * (hkl**2).sum(-1)
dw_factors = np.exp(np.outer(-bfactor, s_tols))
def grad_hkl(self, X, hkl):
seq = self._seq
bfactor = self._bfactor
bfactor = np.clip(bfactor, 1., 100.)
dg = np.zeros((len(self._atoms), hkl.shape[0], hkl.shape[1]))
g = np.zeros((len(self._atoms), hkl.shape[0]))
s_tols = 0.25 * (hkl**2).sum(-1)
dw_factors = np.exp(np.outer(-bfactor, s_tols))
ddw_factors = bfactor[:,np.newaxis] * dw_factors
for atom_type in self._elements:
a,b,c = self._atom_type_params[atom_type]
indices = np.where(seq==atom_type)[0]
inner_exp = np.exp(np.outer(-s_tols,b))
sf = np.dot(inner_exp, a) + c
dsf = np.dot(inner_exp, a*b)
gx = dsf * dw_factors[indices] + sf * ddw_factors[indices]
g[indices,:] = sf[:] * dw_factors[indices]
a = np.einsum('ab,bc->abc',gx, -0.5*hkl)
dg[indices,:,:] = a
phase = np.dot((2 * np.pi * X),hkl.T)
fx= np.sum(g * np.exp(1j * phase),0)
g2 = np.einsum('ba,bc->bac',g , 2 * np.pi * 1j *X)
dfx = np.einsum("abc,ab->bc",dg + g2,np.exp(1j * phase))
return dfx, fx
def calculate_structure_factors(self, X, hkl):
"""
TODO do this calculation in chunks to save space
"""
F = np.zeros(hkl.shape[0], dtype=np.complex128)
lim = hkl.shape[0]
step = 512
for i in range(0,lim,step):
_hkl = hkl[i:i+step]
f = self._calculate_scattering_factors(_hkl)
f *= self._calculate_debyewaller_factors(_hkl)
phase = np.dot((2 * np.pi * X),_hkl.T)
F[i:i+step] = np.sum(f * np.exp(1j * phase),0)
return F
def calculate_structure_factor_gradient(self, X, hkl):
"""
calculates the gradient of the fourier density
with respect to the atomic coordinates
"""
G = np.zeros(hkl.shape, dtype=np.complex128)
lim = hkl.shape[0]
F = np.zeros(hkl.shape[0], dtype=np.complex128)
step = 512
for i in range(0, lim, step):
_hkl = hkl[i:i+step]
dfg, f = self._calculate_gm_grad(_hkl)
phase = np.exp(1j * np.dot((2 * np.pi * X), _hkl.T))
gphase = phase[:, :, np.newaxis] *\
1j * 2 * np.pi * X[:, np.newaxis, :]
grad = dfg * phase[:, :, np.newaxis]
grad += f[:, :, np.newaxis] * gphase
F[i: i+step] = np.sum(f * phase, 0)
G[i: i+step, :] = np.sum(grad, 0)
return G, F
def calculate_structure_factor_gradient2(self, X):
"""
calculates the gradient of the fourier density
with respect to the atomic coordinates
"""
        g_m = self._calculate_scattering_factors(self._hkl)
        g_m *= self._calculate_debyewaller_factors(self._hkl)
phase = np.dot((2 * np.pi * X),self._hkl.T)
fx = (g_m *1j * 2 * np.pi * np.exp(1j * phase))
dF_dx = np.array([np.multiply.outer(s,fx_s) for s,fx_s in
zip(fx.T,self._hkl)])
return dF_dx
def calculate_intensity_gradient(self, X):
"""
calculates the gradient of the intensity with respect to the atomic coordinates dI/dx
"""
g_m = self._calculate_scattering_factors(self._hkl)
g_m *= self._calculate_debyewaller_factors(self._hkl)
phase = np.dot((2 * np.pi * X),self._hkl.T)
F = np.sum(g_m * np.exp(1j * phase),0)
fx = (g_m *1j * 2 * np.pi * np.exp(1j * phase))
dF_dx = np.array([np.multiply.outer(s,fx_s) for s,fx_s in zip(fx.T,self._hkl)])
dI_dx = np.conj(F[:,np.newaxis,np.newaxis]) * dF_dx + F[:,np.newaxis,np.newaxis] * np.conj(dF_dx)
return dI_dx
class Correlations(object):
def __init__(self, angles, nbins):
self._bin_angles(angles, nbins)
def _bin_angles(self, angles, nbins):
pass
def calculate_from_density(self, rho):
pass
class OnePhotonCorrelations(Correlations):
def _bin_angles(self, angles, nbins):
d = np.sqrt(np.sum(angles**2,-1))
lower = d.min()
upper = d.max()
axes = np.linspace(lower, upper, nbins)
indices = np.argsort(d)
bins = [[] for x in xrange(nbins)]
j = 0
for i in range(0,axes.shape[0]):
right_edge = axes[i]
print right_edge, i
while d[indices[j]] < right_edge:
bins[i-1].append(indices[j])
j += 1
bins[-1] = indices[j:].tolist()
self._axes = axes
self._bins = bins
def calculate_from_density(self, rho):
I = np.asarray([np.sum(rho.take(bin))
for bin in self._bins])
return I
class CachedScatteringFactor(ScatteringFactor):
def __init__(self, structure):
super(CachedScatteringFactor,self).__init__(structure)
self._f = None
def calculate_structure_factors(self, X, hkl):
if self._f is None:
print "calc f"
self._f = self._calculate_scattering_factors(hkl)
self._f *= self._calculate_debyewaller_factors(hkl)
else:
print "using cached f"
phase = np.dot((-2 * np.pi * X),hkl.T)
F = np.sum(self._f * np.exp(1j * phase),0)
return F
class SphericalSection(object):
def get(self,
n_points=20, radius=1.0,
polar_min=0., polar_max=np.pi,
azimut_min=0., azimut_max=2*np.pi):
theta = np.linspace(polar_min,polar_max, n_points)
phi = np.linspace(azimut_min, azimut_max, n_points)
x = np.outer(radius*np.sin(theta), np.cos(phi))
y = np.outer(radius*np.sin(theta), np.sin(phi))
z = np.outer(radius*np.cos(theta), np.ones(n_points))
return [x,y,z]
class EwaldSphereProjection(object):
def get_indices(self, wavelength, x,y,z):
"""
projects dectector points onto an Ewald Sphere
x, y, z are the pixel coordinates
x, y, z are all M x N matrices, where M x N is the detector size.
It is assumed that the detector is perpendicular to the Z-axis
"""
d = np.sqrt(x**2 + y**2 + z**2)
h = 1/wavelength * (x/d)
k = 1/wavelength * (y/d)
l = 1/wavelength * (z/d)
return h,k,l
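    # Editor-added usage sketch (illustrative only; the detector geometry and
    # units are assumptions): pixel coordinates must be given in the same
    # length unit as the detector distance.
    #
    #   proj = EwaldSphereProjection()
    #   x, y = np.meshgrid(np.arange(-64., 64.), np.arange(-64., 64.))
    #   z = np.full(x.shape, 100.0)   # detector plane at z = 100
    #   h, k, l = proj.get_indices(1.0, x, y, z)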
def project(self, structure_factor, angle):
pass
if __name__ == "__main__":
import matplotlib
matplotlib.interactive(True)
import time
import os
import seaborn as sns
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import pylab
from pylab import *
from csb.bio.io.wwpdb import StructureParser
from csb.bio.io.wwpdb import get
from xfel.core.density import Density
#structure = get("1L2Y")
#structure = StructureParser(os.path.expanduser("~/data/pdb/caffeine2.pdb")).parse()
#fn = os.path.expanduser("~/gsh.pdb")
structure = StructureParser(os.path.expanduser("~/projects/xfel/data/GTT_short.pdb")).parse()
x = np.linspace(-1.,1.,11)
h, k, l = np.meshgrid(x,x,x)
hkl = np.vstack([item.ravel() for item in [h,k,l]]).T
hkl = np.ascontiguousarray(hkl)
bf = np.random.random()
def bfactors(hkl, bf):
return np.exp(-0.25 * bf * (hkl**2).sum(-1))
def bfactor_grad(hkl):
return np.exp(-0.25 * bf * (hkl**2).sum(-1))[:,np.newaxis] * -0.5 * hkl * bf
a = np.random.random(4,)
b = np.random.random(4,)
c = 0.3
def sf(hkl,a,b,c):
s_tols = -0.25 * (hkl**2).sum(-1)
inner_exp = np.exp(np.outer(-s_tols,b))
sf = np.dot(inner_exp, a) + c
return sf
def sf_grad(hkl, a, b, c):
s_tols = -0.25 * (hkl**2).sum(-1)
sf = np.exp(np.outer(-s_tols,b)) * a[np.newaxis,:] * b[np.newaxis,:] * 0.5
return sf.sum(-1)[:,np.newaxis] * hkl
def gm(hkl, a, b, c, bf):
s_tols = -0.25 * (hkl**2).sum(-1)
inner_exp = np.exp(np.outer(-s_tols,b))
sf = np.dot(inner_exp, a) + c
bf = np.exp(bf * s_tols)
return sf * bf
def gm_grad(hkl, a, b, c, bf):
s_tols = -0.25 * (hkl**2).sum(-1)
g = np.exp(np.outer(-s_tols,b))
sf = np.dot(g, a) + c
gsf = np.sum(g * a[np.newaxis,:] * b[np.newaxis,:] * 0.5, -1)
bb = np.exp(bf * s_tols)
gb = bb * bf * - 0.5
grad = sf * gb + gsf * bb
return grad[:,np.newaxis] * hkl
sf = ScatteringFactor(structure)
X = np.array([a.vector for a in sf._atoms])
X -= X.mean(0)
if False:
n = 10
X = X[:n]
sf._seq = sf._seq[:n]
sf._elements = ['N', 'C']
sf._atoms = sf._atoms[:n]
sf._bfactor = sf._bfactor[:n]
dgm, f1 = sf._calculate_gm_grad(hkl)
f = sf._calculate_scattering_factors(hkl)
f *= sf._calculate_debyewaller_factors(hkl)
scatter(f.real.ravel(), f1.real.ravel())
dgm2 = dgm * 0.0
eps = 1e-7
for i in range(3):
hkl[:, i] += eps
fprime = sf._calculate_scattering_factors(hkl)
fprime *= sf._calculate_debyewaller_factors(hkl)
dgm2[:, :, i] = (fprime - f)/eps
hkl[:, i] -= eps
figure()
scatter(dgm.real.ravel(), dgm2.real.ravel())
G, FF = sf.calculate_structure_factor_gradient(X, hkl)
G2 = G * 0.0
F = sf.calculate_structure_factors(X, hkl)
eps = 1e-7
for i in range(3):
hkl[:,i] += eps
G2[:,i] = (sf.calculate_structure_factors(X, hkl) - F)/eps
hkl[:,i] -= eps
figure()
scatter(G.real.ravel(), G2.real.ravel())
scatter(G.imag.ravel(), G2.imag.ravel())
figure()
scatter(F.real.ravel(), FF.real.ravel())
show()
t0 = time.time()
G, FF = sf.calculate_structure_factor_gradient(X, hkl)
print "hkl gradient: {} \n".format(time.time() - t0)
t0 = time.time()
g = sf.grad_hkl(X, hkl)
print "X gradient: {} \n".format(time.time() - t0)
raise
sf = ScatteringFactor(structure)
sf._hkl = hkl
X = np.array([a.vector for a in sf._atoms])
X -= X.mean(0)
g,g2 = sf.grad_hkl(X, hkl)
F = sf.calculate_structure_factors(X,hkl)
gi= sf.calculate_intensity_gradient(X)
raise
F = F.reshape(h.shape)
rho = np.fft.fftshift(np.abs(np.fft.ifftn(F,[250,250,250])))
grid = Density.from_voxels(np.abs(F)**2,1.)
grid.write_gaussian(os.path.expanduser("~/mr.cube"))
raise
grid = Density.from_voxels(rho,1.)
grid.write_gaussian(os.path.expanduser("~/mr2.cube"))
raise
if True:
fig = pylab.figure()
ax = fig.add_subplot(131)
xi, yi= np.mgrid[0:500:1,0:500:1]
ax.contour(xi,yi, rho.sum(0), 30)
pylab.show()
ax = fig.add_subplot(132)
xi, yi= np.mgrid[0:500:1,0:500:1]
ax.contour(xi,yi, rho.sum(1), 30)
pylab.show()
ax = fig.add_subplot(133)
xi, yi= np.mgrid[0:500:1,0:500:1]
ax.contour(xi,yi, rho.sum(2), 30)
pylab.show()
raise
from mayavi import mlab
xi, yi, zi = np.mgrid[0:500:1,0:500:1,0:500:1]
obj = mlab.contour3d(rho, contours=10, transparent=True)
mlab.show()
from mayavi import mlab
obj = mlab.contour3d(np.abs(F), contours=10, transparent=True)
mlab.show()
raise
for ii in range(0,F.shape[0],25):
fig = pylab.figure()
ax = fig.add_subplot(111)
xi, yi= np.mgrid[0:500:1,0:500:1]
ax.contour(xi,yi,rho[ii,:,:], 30)
pylab.show()
I = np.abs(F)**2
fig = pylab.figure()
ax = fig.add_subplot(111)
nx, ny, nz = I.shape
xi, yi= np.mgrid[0:nx:1,0:ny:1]
ax.contour(xi,yi, I.sum(2), 15)
| mmechelke/bayesian_xfel | bxfel/core/structure_factor.py | Python | mit | 18,608 | 0.010963 |
# -*- coding: utf-8 -*-
"""
pycclib
library for accessing the cloudControl API using Python
Copyright 2010 cloudControl UG (haftungsbeschraenkt)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
### basic usage example
# from pycclib.cclib import *
#
# api = API()
# api.create_token(email='name@example.com', password='secretpassword')
#
# apps = api.read_apps()
"""
import base64
from urlparse import urlparse
import calendar
import urllib
# python versions below 2.6 do not have json included we need simplejson then
try:
import json
except ImportError:
import simplejson as json
import time
from urllib import urlencode
import socket
from decimal import Decimal
import certifi
import httplib2
from pycclib.version import __version__
# We avoid the potential risk of somebody relying on the deprecated apiurl.py
# by raising an exception to make sure nobody talks to the wrong API due to
# our backwards incompatible change.
try:
from pycclib import apiurl
except ImportError:
pass
else:
raise Exception('Use of apiurl.py is deprecated. Set pycclib.API_URL instead.')
__all__ = ['API', 'UnauthorizedError', 'ConnectionException',
'TokenRequiredError', 'BadRequestError', 'ForbiddenError',
'ConflictDuplicateError', 'GoneError', 'InternalServerError',
'NotImplementedError', 'ThrottledError']
API_URL = 'https://api.cloudcontrolled.com'
DISABLE_SSL_CHECK = False
CA_CERTS = None
CACHE = None
# Set debug to 1 to enable debugging
DEBUG = 0
VERSION = __version__
class API():
"""
The API class contains all methods to access the cloudControl RESTful
API.
It wraps the HTTP requests to resources in convenient methods and also
takes care of authenticating each request with a token, if needed.
The create_token, check_token, get_token and set_token methods can be
used to work with the token from outside the API class. This might be
useful when it is not intended to ask users for their email and
password for new instances of the API class.
To instantiate API with a predefined token use something like:
# token = json.loads('{"token": "A2wY7qgUNM5eTRM3Lz6D4RZHuGmYPP"}')
# api = API(token=token)
"""
_token = None
request = None
cache = None
def __init__(self, token=None, url=None, token_source_url=None, register_addon_url=None, encode_email=False):
self.set_token(token)
api_url = url or API_URL
self.request = _Request(url=api_url)
self.token_source_url = token_source_url or api_url + '/token/'
self.ssh_token_source_url = api_url + '/token/'
if token:
self.request.set_token_authorization_header(token)
self.register_addon_url = register_addon_url or api_url
self.encode_email = encode_email
def check_versions(self):
version_request = _Request(url=self.request.url)
content = version_request.get('/.meta/version/')
return json.loads(content)
def requires_token(self):
"""
requires_token checks that methods that require
a token can't be called without a token.
If check_token doesn't return True a TokenRequiredError exception
is raised telling the caller to use the create_token method to get
a valid token.
"""
if not self.check_token():
raise TokenRequiredError
def create_token(self, email, password):
"""
Sends token creation request to API using basic auth - for backwards compatibility
"""
return self.create_token_basic_auth(email, password)
def create_token_basic_auth(self, email, password):
"""
Sends token creation request to API using basic auth
"""
token_request = _Request(url=self.token_source_url)
token_request.set_basic_authorization_header(email, password, self.encode_email)
return self.token_request(token_request)
def create_token_ssh_auth(self, email, ssh_token, signature, fingerprint):
"""
Sends token creation request to API using ssh auth
"""
token_request = _Request(url=self.ssh_token_source_url)
token_request.set_sshtoken_authorization_header(email, ssh_token, signature, fingerprint)
return self.token_request(token_request)
def token_request(self, token_request):
content = token_request.request('', 'POST')
token = json.loads(content)
self.set_token(token)
self.request.set_token_authorization_header(token)
return True
def create_ssh_token(self):
try:
token_request = _Request(url=self.ssh_token_source_url)
token_request.request('', 'POST')
raise APIException('Expected UnauthorizedError has not been raised')
except UnauthorizedError as e:
result = httplib2._parse_www_authenticate(e.response)
try:
ssh_token = result['ccssh']['sshtoken']
            except (KeyError, TypeError):
raise APIException('SSH token was not created')
if not ssh_token:
raise APIException('Empty SSH token.')
return ssh_token
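    # Editor-added usage sketch (illustrative only): the SSH token returned
    # above has to be signed with the user's private key outside of pycclib;
    # `sign_with_ssh_key` is a hypothetical user-supplied helper.
    #
    #   ssh_token = api.create_ssh_token()
    #   signature, fingerprint = sign_with_ssh_key(ssh_token)
    #   api.create_token_ssh_auth('name@example.com', ssh_token,
    #                             signature, fingerprint)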
def check_token(self):
"""
This method checks if there's a token.
"""
if self.request.token:
return True
return False
def set_token(self, token):
"""
We use set_token to set the token.
"""
self._token = token
def get_token(self):
"""
We use get_token to get the token.
"""
return self._token
def create_app(self, app_name, type, repository_type, buildpack_url=None):
"""
Create a new application and return it.
"""
self.requires_token()
resource = '/app/'
data = {'name': app_name,
'type': type,
'repository_type': repository_type}
if buildpack_url:
data['buildpack_url'] = buildpack_url
content = self.request.post(resource, data)
return json.loads(content)
def read_apps(self):
"""
Returns a list of applications.
"""
self.requires_token()
resource = '/app/'
content = self.request.get(resource)
return json.loads(content)
def read_app(self, app_name):
"""
Returns all application details.
"""
self.requires_token()
resource = '/app/%s/' % app_name
content = self.request.get(resource)
return json.loads(content)
def delete_app(self, app_name):
"""
Delete a application.
"""
self.requires_token()
resource = '/app/%s/' % app_name
self.request.delete(resource)
return True
def create_deployment(self, app_name, deployment_name='', stack=None):
"""
Create a new deployment.
deployment_name is optional
"""
self.requires_token()
resource = '/app/%s/deployment/' % app_name
data = {}
if deployment_name:
data['name'] = deployment_name
if stack:
data['stack'] = stack
content = self.request.post(resource, data)
return json.loads(content)
def read_deployment(self, app_name, deployment_name):
"""
Returns all deployment details.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/' % (app_name, deployment_name)
content = self.request.get(resource)
return json.loads(content)
def read_deployment_users(self, app_name, deployment_name):
"""
get a list of the deployment-users
"""
self.requires_token()
resource = '/app/%s/deployment/%s/user/' % (app_name, deployment_name)
content = self.request.get(resource)
return json.loads(content)
def update_deployment(self, app_name, version=-1, deployment_name='',
min_boxes=None, max_boxes=None, billing_account=None,
stack=None):
"""
Updates a deployment.
Use this to deploy new versions. If no version is provided the
last version is deployed.
"""
self.requires_token()
if deployment_name == '':
deployment_name = 'default'
resource = '/app/%s/deployment/%s/' % (app_name, deployment_name)
data = {'version': version}
if min_boxes:
data['min_boxes'] = min_boxes
if max_boxes:
data['max_boxes'] = max_boxes
if billing_account:
data['billing_account'] = billing_account
if stack:
data['stack'] = stack
content = self.request.put(resource, data)
return json.loads(content)
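    # Editor-added example (values are placeholders):
    #
    #   api.update_deployment('myapp', version='a1b2c3d', deployment_name='default')
    #
    # deploys version 'a1b2c3d' of 'myapp' to its default deployment.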
def delete_deployment(self, app_name, deployment_name):
"""
Delete a deployment.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/' % (app_name, deployment_name)
self.request.delete(resource)
return True
def create_alias(self, app_name, alias_name, deployment_name):
"""
Add an alias to a deployment.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/alias/' % (app_name, deployment_name)
data = {'name': alias_name}
content = self.request.post(resource, data)
return json.loads(content)
def read_aliases(self, app_name=None, deployment_name=None):
"""
Get a list of aliases.
"""
content = None
if app_name and deployment_name:
self.requires_token()
resource = '/app/%s/deployment/%s/alias/' % \
(app_name, deployment_name)
content = self.request.get(resource)
return json.loads(content)
def read_alias(self, app_name, alias_name, deployment_name):
"""
Get all alias details.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/alias/%s/' % \
(app_name, deployment_name, alias_name)
content = self.request.get(resource)
return json.loads(content)
def delete_alias(self, app_name, alias_name, deployment_name):
"""
Remove an alias from a deployment.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/alias/%s/' % \
(app_name, deployment_name, alias_name)
self.request.delete(resource)
return True
def create_worker(self, app_name, deployment_name, command, params=None, size=None):
"""
Add an worker to a deployment.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/worker/' % \
(app_name, deployment_name)
data = {'command': command}
if params:
data['params'] = params
if size:
data['size'] = size
content = self.request.post(resource, data)
return json.loads(content)
def read_workers(self, app_name=None, deployment_name=None):
"""
Get a list of runnnig workers for a deployment.
"""
content = None
if app_name and deployment_name:
self.requires_token()
resource = '/app/%s/deployment/%s/worker/' % \
(app_name, deployment_name)
content = self.request.get(resource)
return json.loads(content)
def read_worker(self, app_name, deployment_name, wrk_id):
"""
Get all worker details.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/worker/%s/' % \
(app_name, deployment_name, wrk_id)
content = self.request.get(resource)
return json.loads(content)
def delete_worker(self, app_name, deployment_name, wrk_id):
"""
Remove an worker from a deployment.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/worker/%s/' % \
(app_name, deployment_name, wrk_id)
self.request.delete(resource)
return True
def create_cronjob(self, app_name, deployment_name, url):
"""
Add an worker to a deployment.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/cron/' % (app_name, deployment_name)
data = {'url': url}
content = self.request.post(resource, data)
return json.loads(content)
def read_cronjobs(self, app_name=None, deployment_name=None):
"""
Get a list of cronjobs.
"""
content = None
if app_name and deployment_name:
self.requires_token()
resource = '/app/%s/deployment/%s/cron/' % \
(app_name, deployment_name)
content = self.request.get(resource)
return json.loads(content)
def read_cronjob(self, app_name, deployment_name, job_id):
"""
Get all worker details.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/cron/%s/' % \
(app_name, deployment_name, job_id)
content = self.request.get(resource)
return json.loads(content)
def delete_cronjob(self, app_name, deployment_name, job_id):
"""
Remove an worker from a deployment.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/cron/%s/' % \
(app_name, deployment_name, job_id)
self.request.delete(resource)
return True
def register_addon(self, email, password, data):
"""
Register a new addon on the platform.
The addon manifest content needs to be passed via the data argument.
"""
request = _Request(url=self.register_addon_url)
request.set_basic_authorization_header(email, password, encode_email=self.encode_email)
content = request.post('/provider/addons', data, json_data=True)
return json.loads(content)
def create_addon(self, app_name, deployment_name, addon_name, options=None):
"""
Add an addon to a deployment.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/addon/' % (app_name, deployment_name)
data = {'addon': addon_name}
if options:
data['options'] = options
content = self.request.post(resource, data)
return json.loads(content)
def read_addons(self, app_name=None, deployment_name=None):
"""
Get a list of addons.
If app_name and deployment_name are None it will return a list
of available addons. Otherwise a list of addons related to that
deployment.
"""
if app_name and deployment_name:
self.requires_token()
resource = '/app/%s/deployment/%s/addon/' % \
(app_name, deployment_name)
content = self.request.get(resource)
else:
resource = '/addon/'
content = self.request.get(resource)
return json.loads(content)
def read_addon(self, app_name, deployment_name, addon_name):
"""
Get all addon details.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/addon/%s/' % \
(app_name, deployment_name, addon_name)
content = self.request.get(resource)
return json.loads(content)
def update_addon(self, app_name, deployment_name, addon_name_current,
addon_name_to_update_to, settings=None, force=False):
self.requires_token()
resource = '/app/%s/deployment/%s/addon/%s/' % \
(app_name, deployment_name, addon_name_current)
data = {'addon': addon_name_to_update_to}
if settings:
data['settings'] = settings
if force:
data['force'] = force
content = self.request.put(resource, data)
return json.loads(content)
def delete_addon(self, app_name, deployment_name, addon_name):
"""
Remove an addon from a deployment.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/addon/%s/' % \
(app_name, deployment_name, addon_name)
self.request.delete(resource)
return True
def read_app_users(self, app_name):
"""
Get a list of app users.
"""
self.requires_token()
resource = '/app/%s/user/' % app_name
content = self.request.get(resource)
return json.loads(content)
def create_app_user(self, app_name, email, role=None):
"""
Add a user to an application.
"""
self.requires_token()
resource = '/app/%s/user/' % app_name
data = {'email': email}
if role:
data['role'] = role
content = self.request.post(resource, data)
return json.loads(content)
def delete_app_user(self, app_name, user_name):
"""
Remove a user from an application.
"""
self.requires_token()
resource = '/app/%s/user/%s/' % (app_name, user_name)
self.request.delete(resource)
return True
def create_deployment_user(self, app_name, deployment_name, email, role=None):
"""
Add a user to a deployment.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/user/' % (app_name, deployment_name)
data = {'email': email}
if role:
data['role'] = role
content = self.request.post(resource, data)
return json.loads(content)
def delete_deployment_user(self, app_name, deployment_name, user_name):
"""
Remove a user from a deployment.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/user/%s/' % (app_name, deployment_name, user_name)
self.request.delete(resource)
return True
def read_users(self):
"""
Get a list of users. Usually just your own.
"""
self.requires_token()
resource = '/user/'
content = self.request.get(resource)
return json.loads(content)
def create_user(self, name, email, password):
"""
Create a new user.
"""
resource = '/user/'
user_request = _Request(url=self.request.url)
data = {
'username': name,
'email': email,
'password': password}
content = user_request.post(resource, data)
return json.loads(content)
def read_user(self, user_name):
"""
Get user by user_name.
"""
self.requires_token()
resource = '/user/%s/' % user_name
content = self.request.get(resource)
return json.loads(content)
def update_user(self, user_name, activation_code=None):
"""
Update user by user_name.
Use this for activation after registration.
"""
resource = '/user/%s/' % user_name
if activation_code:
user_request = _Request(url=self.request.url)
data = {'activation_code': activation_code}
user_request.put(resource, data)
else:
# Not implemented yet
return False
return True
def delete_user(self, user_name):
"""
Delete user by user_name.
"""
self.requires_token()
resource = '/user/%s/' % user_name
self.request.delete(resource)
return True
def read_user_keys(self, user_name):
"""
Get a list of keys belonging to user selected by user_name.
"""
self.requires_token()
resource = '/user/%s/key/' % user_name
content = self.request.get(resource)
return json.loads(content)
def read_user_key(self, user_name, key_id):
"""
Get a key by user_name and key_id.
"""
self.requires_token()
resource = '/user/%s/key/%s/' % (user_name, key_id)
content = self.request.get(resource)
return json.loads(content)
def create_user_key(self, user_name, public_key):
"""
Add a key to user by user_name.
"""
self.requires_token()
resource = '/user/%s/key/' % user_name
data = {'key': public_key}
content = self.request.post(resource, data)
return json.loads(content)
def delete_user_key(self, user_name, key_id):
"""
Remove a key from user by user_name.
Requires a key_id, which can be obtained via read_user_keys().
"""
self.requires_token()
resource = '/user/%s/key/%s/' % (user_name, key_id)
self.request.delete(resource)
return True
def read_log(self, app_name, deployment_name, log_type, last_time=None):
"""
Get a deployment's log by log_type.
log_type choices are 'access' or 'error'.
last_time is optional; it accepts a Python datetime object or a time struct.
"""
self.requires_token()
if last_time:
try:
last_time_tuple = last_time.timetuple()
timestamp = Decimal('{0}.{1}'.format(int(time.mktime(last_time_tuple)), last_time.microsecond))
except (TypeError, AttributeError):
timestamp = calendar.timegm(last_time)
resource = '/app/%s/deployment/%s/log/%s/?timestamp=%s' % \
(app_name, deployment_name, log_type, timestamp)
else:
resource = '/app/%s/deployment/%s/log/%s/' % \
(app_name, deployment_name, log_type)
content = self.request.get(resource)
return json.loads(content)
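#
# Illustrative sketch (assumption: 'api' is an authenticated API instance).
# last_time may be a datetime, as handled by the timetuple() branch above.
#
#   from datetime import datetime, timedelta
#   since = datetime.utcnow() - timedelta(hours=1)
#   entries = api.read_log('myapp', 'default', 'error', last_time=since)
#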
def create_billing_account(self, userName, billingName, data):
"""
Creates a billing account.
"""
self.requires_token()
resource = '/user/%s/billing/%s/' % (userName, billingName)
content = self.request.post(resource, data)
return json.loads(content)
def update_billing_account(self, userName, billingName, data):
"""
Updates a billing account.
"""
self.requires_token()
resource = '/user/%s/billing/%s/' % (userName, billingName)
content = self.request.put(resource, data)
return json.loads(content)
def get_billing_accounts(self, userName):
"""
Returns all of the user's billing accounts.
"""
self.requires_token()
resource = '/user/%s/billing/' % userName
content = self.request.get(resource)
return json.loads(content)
###
#
# EXCEPTIONS
#
###
class APIException(Exception):
response = None
def __init__(self, message=None, resp=None):
super(Exception, self).__init__(message)
self.response = resp
class ConnectionException(APIException):
"""
We raise this exception if the API was unreachable.
"""
pass
class TokenRequiredError(APIException):
"""
We raise this exception if a method requires a token but self._token
is None.
Use the create_token() method to get a new token.
"""
#noinspection PyMethodOverriding
def __unicode__(self):
return 'No valid token. Use create_token(email, password) to get one'
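#
# Hedged example (added for illustration): a typical reaction to this error,
# based on the hint above. The email and password values are placeholders.
#
#   try:
#       api.read_users()
#   except TokenRequiredError:
#       api.create_token('user@example.com', 'secret')
#       api.read_users()
#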
class BadRequestError(APIException):
"""
We raise this exception whenever the API answers with HTTP STATUS 400
BAD REQUEST.
"""
msgs = {}
#noinspection PyMissingConstructor
def __init__(self, value):
try:
try:
self.msgs = json.loads(value)
except ValueError:
self.msgs = json.loads(value[12:])
except ValueError:
self.msgs = {}
def __str__(self):
msg = ''
for key in self.msgs:
msg = msg + key + ': ' + self.msgs[key] + '\n'
return msg
class UnauthorizedError(APIException):
"""
We raise this exception whenever the API answers with HTTP STATUS 401
UNAUTHORIZED.
"""
pass
class ForbiddenError(APIException):
"""
We raise this exception whenever the API answers with HTTP STATUS 403
FORBIDDEN.
"""
pass
class NotFoundError(APIException):
"""
We raise this exception whenever the API answers with HTTP STATUS 404
NOT FOUND.
"""
pass
class ConflictDuplicateError(APIException):
"""
We raise this exception whenever the API answers with HTTP STATUS 409
DUPLICATE ENTRY.
"""
pass
class GoneError(APIException):
"""
We raise this exception whenever the API answers with HTTP STATUS 410
GONE.
"""
pass
class InternalServerError(APIException):
"""
We raise this exception whenever the API answers with HTTP STATUS 500
INTERNAL SERVER ERROR.
"""
pass
class NotImplementedError(APIException):
"""
We raise this exception whenever the API answers with HTTP STATUS 501
NOT IMPLEMENTED.
"""
pass
class ThrottledError(APIException):
"""
We raise this exception whenever the API answers with HTTP STATUS 503
THROTTLED.
"""
msgs = {}
#noinspection PyMissingConstructor
def __init__(self, value):
self.prefix_with_error = True
try:
try:
self.msgs = json.loads(value)
except ValueError:
self.msgs = json.loads(value[12:])
except ValueError:
self.msgs = {'error': value}
self.prefix_with_error = False
def __str__(self):
msg = ''
for key in self.msgs:
if self.prefix_with_error:
msg += "[ERROR] "
msg += self.msgs[key] + '\n'
return msg[:-1]
class UnprocessableEntityError(APIException):
"""
We raise this exception whenever the API answers with HTTP STATUS 422
UNPROCESSABLE ENTITY.
"""
pass
class BadGatewayError(APIException):
pass
class GatewayTimeoutError(APIException):
pass
###
#
# _Request Class using httplib2 to fire HTTP requests
#
###
class _Request():
"""
_Request is used internally to actually fire API requests. It has some
handy shortcut methods for POST, GET, PUT and DELETE, sets correct
headers for each method, takes care of encoding data and handles all
API errors by throwing exceptions.
"""
token = None
version = None
cache = None
url = None
disable_ssl_check = None
ca_certs = None
def __init__(self, url=API_URL):
"""
When initializing a _Request object, decide whether SSH auth, token auth
or email/password auth should be used. The class handles each case
accordingly.
"""
self.version = VERSION
self.cache = CACHE
self.url = url
self.disable_ssl_check = DISABLE_SSL_CHECK
self.ca_certs = CA_CERTS or certifi.where()
self.headers = {}
def post(self, resource, data=None, json_data=False):
if not data:
data = {}
return self.request(resource, method='POST', data=data, json_data=json_data)
def get(self, resource):
return self.request(resource)
def put(self, resource, data=None):
if not data:
data = {}
return self.request(resource, method='PUT', data=data)
def delete(self, resource):
return self.request(resource, method='DELETE')
def set_basic_authorization_header(self, email, password, encode_email=False):
if encode_email:
email = urllib.quote(email)
self.headers['Authorization'] = 'Basic ' + base64.b64encode("%s:%s" % (email, password)).strip()
def set_token_authorization_header(self, token):
self.token = token
self.headers['Authorization'] = 'cc_auth_token="%s"' % (token['token'])
def set_sshtoken_authorization_header(self, email, ssh_token, signature, fingerprint):
auth_string = 'signature={0},fingerprint={1},sshtoken={2},email={3}'.format(
signature, fingerprint, ssh_token, email)
self.headers['Authorization'] = 'ccssh ' + auth_string
def request(self, resource, method='GET', data=None, headers=None, json_data=False):
"""
we use the excellent httplib2 for all the heavy HTTP protocol
lifting.
"""
if headers:
self.headers.update(headers)
url = urlparse(self.url + resource)
h = httplib2.Http()
if self.cache:
h.cache = self.cache
if self.disable_ssl_check:
h.disable_ssl_certificate_validation = self.disable_ssl_check
if self.ca_certs:
h.ca_certs = self.ca_certs
#
# The API expects the body to be url-encoded. If data was passed to
# the request method we therefore use url-encode from urllib.
#
if data is None:
body = ''
else:
if json_data:
body = json.dumps(data)
else:
body = urlencode(data)
#
# We set the Host Header for MacOSX 10.5,
# to circumvent the NotFoundError
#
self.headers['Host'] = url.hostname
#
# We set the User-Agent Header to pycclib and the local version.
# This enables basic statistics about still used pycclib versions in
# the wild.
#
self.headers['User-Agent'] = 'pycclib/%s' % self.version
if method.upper() in ('PUT', 'POST'):
if json_data:
self.headers['Content-Type'] = 'application/json'
else:
self.headers['Content-Type'] = 'application/x-www-form-urlencoded'
#
# We also set the Content-Length and Accept-Encoding headers.
#
self.headers['Content-Length'] = str(len(body))
self.headers['Accept-Encoding'] = 'compress, gzip'
#
# Debug HTTP requests
if DEBUG:
httplib2.debuglevel = DEBUG
#
# Finally we fire the actual request.
#
resp = None
content = None
for i in range(1, 6):
try:
resp, content = h.request(
url.geturl(),
method.upper(),
body=body,
headers=self.headers)
if DEBUG:
print 'DEBUG(resp)>>> {0}'.format(repr(resp))
print 'DEBUG(content)>>> {0}'.format(repr(content))
except (socket.error, AttributeError), e:
# if we could not reach the API we wait 1s and try again
time.sleep(1)
# if we tried for the fifth time we give up - and cry a little
if i == 5:
if DEBUG:
print 'DEBUG(exception)>>> {0}'.format(e)
raise ConnectionException('Could not connect to API...')
except httplib2.SSLHandshakeError:
raise ConnectionException('Certificate verification failed ...')
else:
break
#
# And handle the possible responses according to their HTTP STATUS
# CODES.
#
# 200 OK, 201 CREATED and 204 DELETED result in returning the actual
# response.
#
# All non success STATUS CODES raise an exception containing
# the API error message.
#
if resp.status in [200, 201, 204]:
return content.decode('UTF8')
elif resp.status == 400:
raise BadRequestError(content.decode('UTF8'))
elif resp.status == 401:
raise UnauthorizedError(content.decode('UTF8'), resp)
elif resp.status == 403:
raise ForbiddenError(content.decode('UTF8'))
elif resp.status == 404:
raise NotFoundError()
elif resp.status == 409:
raise ConflictDuplicateError(content.decode('UTF8'))
elif resp.status == 410:
raise GoneError(content.decode('UTF8'))
elif resp.status == 422:
raise UnprocessableEntityError(content.decode('UTF8'))
#
# 500 INTERNAL SERVER ERRORs normally shouldn't happen...
#
elif resp.status == 500:
raise InternalServerError(content.decode('UTF8'))
elif resp.status == 501:
raise NotImplementedError(content.decode('UTF8'))
elif resp.status == 502:
raise BadGatewayError(content.decode('UTF-8'))
elif resp.status == 503:
raise ThrottledError(content.decode('UTF8'))
elif resp.status == 504:
raise GatewayTimeoutError(content.decode('UTF-8'))
else:
raise Exception('Received unexpected response status: %s' % str(resp.status))
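#
# Illustrative sketch (added for clarity; not in the original file): because
# request() maps HTTP status codes to the exception classes above, callers
# can branch on the failure type. The field name in the sample output is an
# assumption.
#
#   try:
#       content = _Request().post('/user/', {'username': 'x'})
#   except BadRequestError as e:
#       print str(e)    # e.g. "email: This field is required."
#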
| cloudControl/pycclib | pycclib/cclib.py | Python | apache-2.0 | 33,766 | 0.000829 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-02 14:22
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rstorch', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='category',
name='is_active',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='store',
name='is_active',
field=models.BooleanField(default=False),
),
]
| alevnyaa/restfulstorch | rstorch/migrations/0002_auto_20170302_1722.py | Python | gpl-3.0 | 602 | 0 |
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Tests the msvs specific msvs_target_platform option.
"""
import TestGyp
import TestCommon
def RunX64(exe, stdout):
try:
test.run_built_executable(exe, stdout=stdout)
except WindowsError as e:
# Assume the exe is 64-bit if it can't load on 32-bit systems.
# Both versions of the error are required because different versions
# of python seem to return different errors for invalid exe type.
if e.errno != 193 and '[Error 193]' not in str(e):
raise
test = TestGyp.TestGyp(formats=['msvs'])
test.run_gyp('configurations.gyp')
test.set_configuration('Debug|x64')
test.build('configurations.gyp', rebuild=True)
RunX64('front_left', stdout=('left\n'))
RunX64('front_right', stdout=('right\n'))
test.set_configuration('Debug|Win32')
test.build('configurations.gyp', rebuild=True)
RunX64('front_left', stdout=('left\n'))
test.run_built_executable('front_right', stdout=('right\n'))
test.pass_test()
| ibc/MediaSoup | worker/deps/gyp/test/configurations/target_platform/gyptest-target_platform.py | Python | isc | 1,114 | 0.002693 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v9.enums.types import access_role as gage_access_role
from google.ads.googleads.v9.enums.types import (
response_content_type as gage_response_content_type,
)
from google.ads.googleads.v9.resources.types import customer as gagr_customer
from google.protobuf import field_mask_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v9.services",
marshal="google.ads.googleads.v9",
manifest={
"GetCustomerRequest",
"MutateCustomerRequest",
"CreateCustomerClientRequest",
"CustomerOperation",
"CreateCustomerClientResponse",
"MutateCustomerResponse",
"MutateCustomerResult",
"ListAccessibleCustomersRequest",
"ListAccessibleCustomersResponse",
},
)
class GetCustomerRequest(proto.Message):
r"""Request message for
[CustomerService.GetCustomer][google.ads.googleads.v9.services.CustomerService.GetCustomer].
Attributes:
resource_name (str):
Required. The resource name of the customer
to fetch.
"""
resource_name = proto.Field(proto.STRING, number=1,)
class MutateCustomerRequest(proto.Message):
r"""Request message for
[CustomerService.MutateCustomer][google.ads.googleads.v9.services.CustomerService.MutateCustomer].
Attributes:
customer_id (str):
Required. The ID of the customer being
modified.
operation (google.ads.googleads.v9.services.types.CustomerOperation):
Required. The operation to perform on the
customer
validate_only (bool):
If true, the request is validated but not
executed. Only errors are returned, not results.
response_content_type (google.ads.googleads.v9.enums.types.ResponseContentTypeEnum.ResponseContentType):
The response content type setting. Determines
whether the mutable resource or just the
resource name should be returned post mutation.
"""
customer_id = proto.Field(proto.STRING, number=1,)
operation = proto.Field(
proto.MESSAGE, number=4, message="CustomerOperation",
)
validate_only = proto.Field(proto.BOOL, number=5,)
response_content_type = proto.Field(
proto.ENUM,
number=6,
enum=gage_response_content_type.ResponseContentTypeEnum.ResponseContentType,
)
class CreateCustomerClientRequest(proto.Message):
r"""Request message for
[CustomerService.CreateCustomerClient][google.ads.googleads.v9.services.CustomerService.CreateCustomerClient].
Attributes:
customer_id (str):
Required. The ID of the Manager under whom
client customer is being created.
customer_client (google.ads.googleads.v9.resources.types.Customer):
Required. The new client customer to create.
The resource name on this customer will be
ignored.
email_address (str):
Email address of the user who should be
invited on the created client customer.
Accessible only to customers on the allow-list.
This field is a member of `oneof`_ ``_email_address``.
access_role (google.ads.googleads.v9.enums.types.AccessRoleEnum.AccessRole):
The proposed role of user on the created
client customer. Accessible only to customers on
the allow-list.
validate_only (bool):
If true, the request is validated but not
executed. Only errors are returned, not results.
"""
customer_id = proto.Field(proto.STRING, number=1,)
customer_client = proto.Field(
proto.MESSAGE, number=2, message=gagr_customer.Customer,
)
email_address = proto.Field(proto.STRING, number=5, optional=True,)
access_role = proto.Field(
proto.ENUM, number=4, enum=gage_access_role.AccessRoleEnum.AccessRole,
)
validate_only = proto.Field(proto.BOOL, number=6,)
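# Illustrative sketch (added for clarity; not part of the generated code):
# proto-plus messages accept keyword arguments, so a request can be built as
# shown below. The field values are placeholders.
#
#   request = CreateCustomerClientRequest(
#       customer_id="1234567890",
#       customer_client=gagr_customer.Customer(descriptive_name="New client"),
#   )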
class CustomerOperation(proto.Message):
r"""A single update on a customer.
Attributes:
update (google.ads.googleads.v9.resources.types.Customer):
Mutate operation. Only updates are supported
for customer.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
FieldMask that determines which resource
fields are modified in an update.
"""
update = proto.Field(
proto.MESSAGE, number=1, message=gagr_customer.Customer,
)
update_mask = proto.Field(
proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,
)
class CreateCustomerClientResponse(proto.Message):
r"""Response message for CreateCustomerClient mutate.
Attributes:
resource_name (str):
The resource name of the newly created
customer client.
invitation_link (str):
Link for inviting user to access the created
customer. Accessible to allowlisted customers
only.
"""
resource_name = proto.Field(proto.STRING, number=2,)
invitation_link = proto.Field(proto.STRING, number=3,)
class MutateCustomerResponse(proto.Message):
r"""Response message for customer mutate.
Attributes:
result (google.ads.googleads.v9.services.types.MutateCustomerResult):
Result for the mutate.
"""
result = proto.Field(
proto.MESSAGE, number=2, message="MutateCustomerResult",
)
class MutateCustomerResult(proto.Message):
r"""The result for the customer mutate.
Attributes:
resource_name (str):
Returned for successful operations.
customer (google.ads.googleads.v9.resources.types.Customer):
The mutated customer with only mutable fields after mutate.
The fields will only be returned when response_content_type
is set to "MUTABLE_RESOURCE".
"""
resource_name = proto.Field(proto.STRING, number=1,)
customer = proto.Field(
proto.MESSAGE, number=2, message=gagr_customer.Customer,
)
class ListAccessibleCustomersRequest(proto.Message):
r"""Request message for
[CustomerService.ListAccessibleCustomers][google.ads.googleads.v9.services.CustomerService.ListAccessibleCustomers].
"""
class ListAccessibleCustomersResponse(proto.Message):
r"""Response message for
[CustomerService.ListAccessibleCustomers][google.ads.googleads.v9.services.CustomerService.ListAccessibleCustomers].
Attributes:
resource_names (Sequence[str]):
Resource name of customers directly
accessible by the user authenticating the call.
"""
resource_names = proto.RepeatedField(proto.STRING, number=1,)
__all__ = tuple(sorted(__protobuf__.manifest))
| googleads/google-ads-python | google/ads/googleads/v9/services/types/customer_service.py | Python | apache-2.0 | 7,422 | 0.000404 |
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import errno
import logging
import os
import re
from django.conf import settings
from pootle.core.log import STORE_RESURRECTED, store_log
from pootle.core.utils.timezone import datetime_min
from pootle_app.models.directory import Directory
from pootle_language.models import Language
from pootle_store.models import Store
from pootle_store.util import absolute_real_path, relative_real_path
#: Case insensitive match for language codes
LANGCODE_RE = re.compile('^[a-z]{2,3}([_-][a-z]{2,3})?(@[a-z0-9]+)?$',
re.IGNORECASE)
#: Case insensitive match for language codes as postfix
LANGCODE_POSTFIX_RE = re.compile(
'^.*?[-_.]([a-z]{2,3}([_-][a-z]{2,3})?(@[a-z0-9]+)?)$', re.IGNORECASE)
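# Examples (added for illustration) of what the two patterns accept:
#
#   LANGCODE_RE.match("fr")                           # matches
#   LANGCODE_RE.match("pt_BR")                        # matches
#   LANGCODE_RE.match("sr@latin")                     # matches
#   LANGCODE_POSTFIX_RE.match("docs_pt_BR").group(1)  # -> "pt_BR"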
def direct_language_match_filename(language_code, path_name):
name, ext = os.path.splitext(os.path.basename(path_name))
if name == language_code or name.lower() == language_code.lower():
return True
# Check file doesn't match another language.
if Language.objects.filter(code__iexact=name).count():
return False
detect = LANGCODE_POSTFIX_RE.split(name)
return (len(detect) > 1 and
(detect[1] == language_code or
detect[1].lower() == language_code.lower()))
def match_template_filename(project, filename):
"""Test if :param:`filename` might point at a template file for a given
:param:`project`.
"""
name, ext = os.path.splitext(os.path.basename(filename))
# FIXME: is the test for matching extension redundant?
if ext == os.path.extsep + project.get_template_filetype():
if ext != os.path.extsep + project.localfiletype:
# Template extension is distinct, surely file is a template.
return True
elif not find_lang_postfix(filename):
# File name can't possibly match any language, assume it is a
# template.
return True
return False
def get_matching_language_dirs(project_dir, language):
return [lang_dir for lang_dir in os.listdir(project_dir)
if language.code == lang_dir]
def get_non_existant_language_dir(project_dir, language, file_style,
make_dirs):
if file_style == "gnu":
return project_dir
elif make_dirs:
language_dir = os.path.join(project_dir, language.code)
os.mkdir(language_dir)
return language_dir
else:
raise IndexError("Directory not found for language %s, project %s" %
(language.code, project_dir))
def get_or_make_language_dir(project_dir, language, file_style, make_dirs):
matching_language_dirs = get_matching_language_dirs(project_dir, language)
if len(matching_language_dirs) == 0:
# If no matching directories can be found, check if it is a GNU-style
# project.
return get_non_existant_language_dir(project_dir, language, file_style,
make_dirs)
else:
return os.path.join(project_dir, matching_language_dirs[0])
def get_language_dir(project_dir, language, file_style, make_dirs):
language_dir = os.path.join(project_dir, language.code)
if not os.path.exists(language_dir):
return get_or_make_language_dir(project_dir, language, file_style,
make_dirs)
else:
return language_dir
def get_translation_project_dir(language, project_dir, file_style,
make_dirs=False):
"""Returns the base directory containing translations files for the
project.
:param make_dirs: if ``True``, project and language directories will be
created as necessary.
"""
if file_style == 'gnu':
return project_dir
else:
return get_language_dir(project_dir, language, file_style, make_dirs)
def is_hidden_file(path):
return path[0] == '.'
def split_files_and_dirs(ignored_files, ext, real_dir, file_filter):
files = []
dirs = []
for child_path in [child_path for child_path in os.listdir(real_dir)
if child_path not in ignored_files and
not is_hidden_file(child_path)]:
full_child_path = os.path.join(real_dir, child_path)
if (os.path.isfile(full_child_path) and
full_child_path.endswith(ext) and file_filter(full_child_path)):
files.append(child_path)
elif os.path.isdir(full_child_path):
dirs.append(child_path)
return files, dirs
def add_items(fs_items_set, db_items, create_or_resurrect_db_item, parent):
"""Add/make obsolete the database items to correspond to the filesystem.
:param fs_items_set: items (dirs, files) currently in the filesystem
:param db_items: dict (name, item) of items (dirs, stores) currently in the
database
:param create_or_resurrect_db_item: callable that will create a new db item
or resurrect an obsolete db item with a given name and parent.
:param parent: parent db directory for the items
:return: list of all items, list of newly added items
:rtype: tuple
"""
items = []
new_items = []
db_items_set = set(db_items)
items_to_delete = db_items_set - fs_items_set
items_to_create = fs_items_set - db_items_set
for name in items_to_delete:
db_items[name].makeobsolete()
if len(items_to_delete) > 0:
parent.update_all_cache()
for vfolder_treeitem in parent.vfolder_treeitems:
vfolder_treeitem.update_all_cache()
for name in db_items_set - items_to_delete:
items.append(db_items[name])
for name in items_to_create:
item = create_or_resurrect_db_item(name)
items.append(item)
new_items.append(item)
try:
item.save()
except Exception:
logging.exception('Error while adding %s', item)
return items, new_items
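# Worked example (added for illustration) of the set arithmetic above, using
# placeholder names:
#
#   fs_items_set = {"a.po", "b.po"}
#   db_items = {"b.po": store_b, "c.po": store_c}
#   # -> "c.po" is made obsolete, "a.po" is created or resurrected, and the
#   # function returns ([store_b, <new a.po>], [<new a.po>])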
def create_or_resurrect_store(file, parent, name, translation_project):
"""Create or resurrect a store db item with given name and parent."""
try:
store = Store.objects.get(parent=parent, name=name)
store.obsolete = False
store.file_mtime = datetime_min
if store.last_sync_revision is None:
store.last_sync_revision = store.get_max_unit_revision()
store_log(user='system', action=STORE_RESURRECTED,
path=store.pootle_path, store=store.id)
except Store.DoesNotExist:
store = Store(file=file, parent=parent,
name=name, translation_project=translation_project)
store.mark_all_dirty()
return store
def create_or_resurrect_dir(name, parent):
"""Create or resurrect a directory db item with given name and parent."""
try:
dir = Directory.objects.get(parent=parent, name=name)
dir.obsolete = False
except Directory.DoesNotExist:
dir = Directory(name=name, parent=parent)
dir.mark_all_dirty()
return dir
# TODO: rename function or even rewrite it
def add_files(translation_project, ignored_files, ext, relative_dir, db_dir,
file_filter=lambda _x: True):
podir_path = to_podir_path(relative_dir)
files, dirs = split_files_and_dirs(ignored_files, ext, podir_path,
file_filter)
file_set = set(files)
dir_set = set(dirs)
existing_stores = dict((store.name, store) for store in
db_dir.child_stores.live().exclude(file='')
.iterator())
existing_dirs = dict((dir.name, dir) for dir in
db_dir.child_dirs.live().iterator())
files, new_files = add_items(
file_set,
existing_stores,
lambda name: create_or_resurrect_store(
file=os.path.join(relative_dir, name),
parent=db_dir,
name=name,
translation_project=translation_project,
),
db_dir,
)
db_subdirs, new_db_subdirs = add_items(
dir_set,
existing_dirs,
lambda name: create_or_resurrect_dir(name=name, parent=db_dir),
db_dir,
)
is_empty = len(files) == 0
for db_subdir in db_subdirs:
fs_subdir = os.path.join(relative_dir, db_subdir.name)
_files, _new_files, _is_empty = \
add_files(translation_project, ignored_files, ext, fs_subdir,
db_subdir, file_filter)
files += _files
new_files += _new_files
is_empty &= _is_empty
if is_empty:
db_dir.makeobsolete()
return files, new_files, is_empty
def to_podir_path(path):
path = relative_real_path(path)
return os.path.join(settings.POOTLE_TRANSLATION_DIRECTORY, path)
def find_lang_postfix(filename):
"""Finds the language code at end of a filename."""
name = os.path.splitext(os.path.basename(filename))[0]
if LANGCODE_RE.match(name):
return name
match = LANGCODE_POSTFIX_RE.match(name)
if match:
return match.groups()[0]
for code in Language.objects.values_list('code', flat=True):
if (name.endswith('-'+code) or name.endswith('_'+code) or
name.endswith('.'+code) or
name.lower().endswith('-'+code.lower()) or
name.endswith('_'+code) or name.endswith('.'+code)):
return code
def translation_project_dir_exists(language, project):
"""Tests if there are translation files corresponding to the given
:param:`language` and :param:`project`.
"""
if project.get_treestyle() == "gnu":
# GNU style projects are tricky
if language.code == 'templates':
# Language is 'templates'; look for template files
for dirpath, dirnames, filenames in os.walk(
project.get_real_path()):
for filename in filenames:
if (project.file_belongs_to_project(filename,
match_templates=True)
and match_template_filename(project, filename)):
return True
else:
# find files with the language name in the project dir
for dirpath, dirnames, filenames in os.walk(
project.get_real_path()):
for filename in filenames:
# FIXME: don't reuse already used file
if (project.file_belongs_to_project(filename,
match_templates=False)
and direct_language_match_filename(language.code,
filename)):
return True
else:
# find directory with the language name in the project dir
try:
dirpath, dirnames, filename = os.walk(
project.get_real_path()).next()
if language.code in dirnames:
return True
except StopIteration:
pass
return False
def init_store_from_template(translation_project, template_store):
"""Initialize a new file for `translation_project` using `template_store`.
"""
if translation_project.file_style == 'gnu':
target_pootle_path, target_path = get_translated_name_gnu(
translation_project, template_store)
else:
target_pootle_path, target_path = get_translated_name(
translation_project, template_store)
# Create the missing directories for the new TP.
target_dir = os.path.dirname(target_path)
if not os.path.exists(target_dir):
os.makedirs(target_dir)
output_file = template_store.file.store
output_file.settargetlanguage(translation_project.language.code)
output_file.savefile(target_path)
def get_translated_name_gnu(translation_project, store):
"""Given a template :param:`store` and a :param:`translation_project` return
target filename.
"""
pootle_path_parts = store.pootle_path.split('/')
pootle_path_parts[1] = translation_project.language.code
pootle_path = '/'.join(pootle_path_parts[:-1])
if not pootle_path.endswith('/'):
pootle_path = pootle_path + '/'
suffix = "%s%s%s" % (translation_project.language.code, os.extsep,
translation_project.project.localfiletype)
# try loading file first
try:
target_store = translation_project.stores.live().get(
parent__pootle_path=pootle_path,
name__iexact=suffix,
)
return (target_store.pootle_path,
target_store.file and target_store.file.path)
except Store.DoesNotExist:
target_store = None
# is this GNU-style with prefix?
use_prefix = (store.parent.child_stores.live().exclude(file="").count() > 1
or translation_project.stores.live().exclude(
name__iexact=suffix, file='').count())
if not use_prefix:
# let's make sure
for tp in translation_project.project.translationproject_set.exclude(
language__code='templates').iterator():
temp_suffix = \
"%s%s%s" % (tp.language.code, os.extsep,
translation_project.project.localfiletype)
if tp.stores.live().exclude(
name__iexact=temp_suffix).exclude(file="").count():
use_prefix = True
break
if use_prefix:
if store.translation_project.language.code == 'templates':
tprefix = os.path.splitext(store.name)[0]
# FIXME: we should detect separator
prefix = tprefix + '-'
else:
prefix = os.path.splitext(store.name)[0][:-len(
store.translation_project.language.code)]
tprefix = prefix[:-1]
try:
target_store = translation_project.stores.live().filter(
parent__pootle_path=pootle_path,
name__in=[
tprefix + '-' + suffix,
tprefix + '_' + suffix,
tprefix + '.' + suffix,
tprefix + '-' + suffix.lower(),
tprefix + '_' + suffix.lower(),
tprefix + '.' + suffix.lower(),
],
)[0]
return (target_store.pootle_path,
target_store.file and target_store.file.path)
except (Store.DoesNotExist, IndexError):
pass
else:
prefix = ""
if store.file:
path_parts = store.file.path.split(os.sep)
name = prefix + suffix
path_parts[-1] = name
pootle_path_parts[-1] = name
else:
path_parts = store.parent.get_real_path().split(os.sep)
path_parts.append(store.name)
return '/'.join(pootle_path_parts), os.sep.join(path_parts)
def get_translated_name(translation_project, store):
name, ext = os.path.splitext(store.name)
if store.file:
path_parts = store.file.name.split(os.sep)
else:
path_parts = store.parent.get_real_path().split(os.sep)
path_parts.append(store.name)
pootle_path_parts = store.pootle_path.split('/')
# Replace language code
path_parts[1] = translation_project.language.code
pootle_path_parts[1] = translation_project.language.code
# Replace extension
path_parts[-1] = "%s.%s" % (name,
translation_project.project.localfiletype)
pootle_path_parts[-1] = \
"%s.%s" % (name, translation_project.project.localfiletype)
return ('/'.join(pootle_path_parts),
absolute_real_path(os.sep.join(path_parts)))
def does_not_exist(path):
if os.path.exists(path):
return False
try:
os.stat(path)
# what the hell?
except OSError as e:
if e.errno == errno.ENOENT:
# explicit no such file or directory
return True
| pavels/pootle | pootle/apps/pootle_app/project_tree.py | Python | gpl-3.0 | 16,304 | 0.000184 |
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "roscpp;rospy;std_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lscarab_quad".split(';') if "-lscarab_quad" != "" else []
PROJECT_NAME = "scarab_quad"
PROJECT_SPACE_DIR = "/home/monroe/catkin_ws/install"
PROJECT_VERSION = "0.0.0"
| mdkennedy3/Scarab_DCT_Control | catkin_ws/build/Scarabs/scarab/scarab_quad/catkin_generated/pkg.installspace.context.pc.py | Python | gpl-2.0 | 422 | 0.00237 |
from django.conf.urls import include, url
from powerdns.utils import patterns
urlpatterns = patterns(
'',
url(r'', include('dnsaas.api.v1.urls', namespace='default')),
url(r'^v2/', include('dnsaas.api.v2.urls', namespace='v2')),
)
| dominikkowalski/django-powerdns-dnssec | dnsaas/api/urls.py | Python | bsd-2-clause | 246 | 0 |
import sys
import logging
from playback.api import NovaCompute
from cliff.command import Command
def make_target(args):
try:
target = NovaCompute(user=args.user, hosts=args.hosts.split(','), key_filename=args.key_filename,
password=args.password)
except AttributeError:
sys.stderr.write('No hosts found. Please use the --hosts param.')
sys.exit(1)
return target
def install(args):
target = make_target(args)
target.install(args.my_ip, args.rabbit_hosts, args.rabbit_user, args.rabbit_pass,
args.auth_uri, args.auth_url, args.nova_pass, args.novncproxy_base_url,
args.glance_api_servers, args.neutron_endpoint, args.neutron_pass, args.rbd_secret_uuid,
args.memcached_servers)
class Install(Command):
"""install nova compute"""
log = logging.getLogger(__name__)
def get_parser(self, prog_name):
parser = super(Install, self).get_parser(prog_name)
parser.add_argument('--user',
help='the username to connect to the remote host',
action='store', default='ubuntu', dest='user')
parser.add_argument('--hosts',
help='the remote host to connect to ',
action='store', default=None, dest='hosts')
parser.add_argument('-i', '--key-filename',
help='referencing file paths to SSH key files to try when connecting',
action='store', dest='key_filename', default=None)
parser.add_argument('--password',
help='the password used by the SSH layer when connecting to remote hosts',
action='store', dest='password', default=None)
parser.add_argument('--my-ip',
help='the host management ip',
action='store', default=None, dest='my_ip')
parser.add_argument('--rabbit-hosts',
help='rabbit hosts e.g. CONTROLLER1,CONTROLLER2',
action='store', default=None, dest='rabbit_hosts')
parser.add_argument('--rabbit-user',
help='the user for rabbit, default openstack',
action='store', default='openstack', dest='rabbit_user')
parser.add_argument('--rabbit-pass',
help='the password for rabbit openstack user', action='store',
default=None, dest='rabbit_pass')
parser.add_argument('--auth-uri',
help='keystone internal endpoint e.g. http://CONTROLLER_VIP:5000',
action='store', default=None, dest='auth_uri')
parser.add_argument('--auth-url',
help='keystone admin endpoint e.g. http://CONTROLLER_VIP:35357',
action='store', default=None, dest='auth_url')
parser.add_argument('--nova-pass',
help='passowrd for nova user',
action='store', default=None, dest='nova_pass')
parser.add_argument('--novncproxy-base-url',
help='nova vnc proxy base url e.g. http://CONTROLLER_VIP:6080/vnc_auto.html',
action='store', default=None, dest='novncproxy_base_url')
parser.add_argument('--glance-api-servers',
help='glance host e.g. http://CONTROLLER_VIP:9292',
action='store', default=None, dest='glance_api_servers')
parser.add_argument('--neutron-endpoint',
help='neutron endpoint e.g. http://CONTROLLER_VIP:9696',
action='store', default=None, dest='neutron_endpoint')
parser.add_argument('--neutron-pass',
help='the password for neutron user',
action='store', default=None, dest='neutron_pass')
parser.add_argument('--rbd-secret-uuid',
help='ceph rbd secret for nova libvirt',
action='store', default=None, dest='rbd_secret_uuid')
parser.add_argument('--memcached-servers',
help='memcached servers e.g. CONTROLLER1:11211,CONTROLLER2:11211',
action='store', default=None, dest='memcached_servers')
return parser
def take_action(self, parsed_args):
install(parsed_args)
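# Hedged usage sketch (not part of this module): the flags defined above map
# onto a command-line invocation roughly like the following. The executable
# and sub-command names, host names and passwords are assumptions; check the
# project's documentation for the exact syntax.
#
#   playback nova-compute install \
#       --user ubuntu --hosts COMPUTE1,COMPUTE2 \
#       --my-ip 10.0.0.31 --rabbit-hosts CONTROLLER1,CONTROLLER2 \
#       --rabbit-pass RABBIT_PASS \
#       --auth-uri http://CONTROLLER_VIP:5000 \
#       --auth-url http://CONTROLLER_VIP:35357 \
#       --nova-pass NOVA_PASS \
#       --novncproxy-base-url http://CONTROLLER_VIP:6080/vnc_auto.html \
#       --glance-api-servers http://CONTROLLER_VIP:9292 \
#       --neutron-endpoint http://CONTROLLER_VIP:9696 \
#       --neutron-pass NEUTRON_PASS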
| jiasir/playback | playback/cli/nova_compute.py | Python | mit | 4,590 | 0.003922 |
#####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Kyöstilä #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
from OpenGL.GL import *
import pygame
import os
import sys
from Engine import Engine, Task
from Video import Video
from Audio import Audio
from View import View
from Input import Input, KeyListener, SystemEventListener
from Resource import Resource
from Data import Data
from Server import Server
from Session import ClientSession
from Svg import SvgContext, SvgDrawing, LOW_QUALITY, NORMAL_QUALITY, HIGH_QUALITY
from Debug import DebugLayer
from Language import _
import Network
import Log
import Config
import Dialogs
import Theme
import Version
import Mod
# define configuration keys
Config.define("engine", "tickrate", float, 1.0)
Config.define("engine", "highpriority", bool, True)
Config.define("game", "uploadscores", bool, False, text = _("Upload Highscores"), options = {False: _("No"), True: _("Yes")})
Config.define("game", "uploadurl", str, "http://fretsonfire.sourceforge.net/play")
Config.define("game", "leftymode", bool, False, text = _("Lefty mode"), options = {False: _("No"), True: _("Yes")})
Config.define("game", "tapping", bool, True, text = _("Tappable notes"), options = {False: _("No"), True: _("Yes")})
Config.define("game", "compactlist", bool, False, text = _("Compact song list"), options = {False: _("No"), True: _("Yes")})
Config.define("game", "autopreview", bool, True, text = _("Song auto preview"), options = {False: _("No"), True: _("Yes")})
Config.define("game", "artistsort", bool, False, text = _("Sort by artist"), options = {False: _("No"), True: _("Yes")})
Config.define("video", "fullscreen", bool, False, text = _("Fullscreen Mode"), options = {False: _("No"), True: _("Yes")})
Config.define("video", "multisamples", int, 4, text = _("Antialiasing Quality"), options = {0: _("None"), 2: _("2x"), 4: _("4x"), 6: _("6x"), 8: _("8x")})
Config.define("video", "resolution", str, "640x480")
Config.define("video", "fps", int, 80, text = _("Frames per Second"), options = dict([(n, n) for n in range(1, 120)]))
#Config.define("opengl", "svgquality", int, NORMAL_QUALITY, text = _("SVG Quality"), options = {LOW_QUALITY: _("Low"), NORMAL_QUALITY: _("Normal"), HIGH_QUALITY: _("High")})
Config.define("audio", "frequency", int, 44100, text = _("Sample Frequency"), options = [8000, 11025, 22050, 32000, 44100, 48000])
Config.define("audio", "bits", int, 16, text = _("Sample Bits"), options = [16, 8])
Config.define("audio", "stereo", bool, True)
Config.define("audio", "buffersize", int, 2048, text = _("Buffer Size"), options = [256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536])
Config.define("audio", "delay", int, 100, text = _("A/V delay"), options = dict([(n, n) for n in range(0, 301)]))
Config.define("audio", "screwupvol", float, 0.25, text = _("Screw Up Sounds"), options = {0.0: _("Off"), .25: _("Quiet"), .5: _("Loud"), 1.0: _("Painful")})
Config.define("audio", "guitarvol", float, 1.0, text = _("Guitar Volume"), options = dict([(n / 100.0, "%02d/10" % (n / 9)) for n in range(0, 110, 10)]))
Config.define("audio", "songvol", float, 1.0, text = _("Song Volume"), options = dict([(n / 100.0, "%02d/10" % (n / 9)) for n in range(0, 110, 10)]))
Config.define("audio", "rhythmvol", float, 1.0, text = _("Rhythm Volume"), options = dict([(n / 100.0, "%02d/10" % (n / 9)) for n in range(0, 110, 10)]))
Config.define("video", "fontscale", float, 1.0, text = _("Text scale"), options = dict([(n / 100.0, "%3d%%" % n) for n in range(50, 260, 10)]))
class FullScreenSwitcher(KeyListener):
"""
A keyboard listener that looks for special built-in key combinations,
such as the fullscreen toggle (Alt-Enter).
"""
def __init__(self, engine):
self.engine = engine
self.altStatus = False
def keyPressed(self, key, unicode):
if key == pygame.K_LALT:
self.altStatus = True
elif key == pygame.K_RETURN and self.altStatus:
if not self.engine.toggleFullscreen():
Log.error("Unable to toggle fullscreen mode.")
return True
elif key == pygame.K_d and self.altStatus:
self.engine.setDebugModeEnabled(not self.engine.isDebugModeEnabled())
return True
elif key == pygame.K_g and self.altStatus and self.engine.isDebugModeEnabled():
self.engine.debugLayer.gcDump()
return True
def keyReleased(self, key):
if key == pygame.K_LALT:
self.altStatus = False
class SystemEventHandler(SystemEventListener):
"""
A system event listener that takes care of restarting the game when needed
and reacting to screen resize events.
"""
def __init__(self, engine):
self.engine = engine
def screenResized(self, size):
self.engine.resizeScreen(size[0], size[1])
def restartRequested(self):
self.engine.restart()
def quit(self):
self.engine.quit()
class GameEngine(Engine):
"""The main game engine."""
def __init__(self, config = None):
"""
Constructor.
@param config: L{Config} instance for settings
"""
if not config:
config = Config.load()
self.config = config
fps = self.config.get("video", "fps")
tickrate = self.config.get("engine", "tickrate")
Engine.__init__(self, fps = fps, tickrate = tickrate)
pygame.init()
self.title = _("Frets on Fire")
self.restartRequested = False
self.handlingException = False
self.video = Video(self.title)
self.audio = Audio()
Log.debug("Initializing audio.")
frequency = self.config.get("audio", "frequency")
bits = self.config.get("audio", "bits")
stereo = self.config.get("audio", "stereo")
bufferSize = self.config.get("audio", "buffersize")
self.audio.pre_open(frequency = frequency, bits = bits, stereo = stereo, bufferSize = bufferSize)
pygame.init()
self.audio.open(frequency = frequency, bits = bits, stereo = stereo, bufferSize = bufferSize)
Log.debug("Initializing video.")
width, height = [int(s) for s in self.config.get("video", "resolution").split("x")]
fullscreen = self.config.get("video", "fullscreen")
multisamples = self.config.get("video", "multisamples")
self.video.setMode((width, height), fullscreen = fullscreen, multisamples = multisamples)
# Enable the high priority timer if configured
if self.config.get("engine", "highpriority"):
Log.debug("Enabling high priority timer.")
self.timer.highPriority = True
viewport = glGetIntegerv(GL_VIEWPORT)
h = viewport[3] - viewport[1]
w = viewport[2] - viewport[0]
geometry = (0, 0, w, h)
self.svg = SvgContext(geometry)
self.svg.setRenderingQuality(self.config.get("opengl", "svgquality"))
glViewport(int(viewport[0]), int(viewport[1]), int(viewport[2]), int(viewport[3]))
self.input = Input()
self.view = View(self, geometry)
self.resizeScreen(w, h)
self.resource = Resource(Version.dataPath())
self.server = None
self.sessions = []
self.mainloop = self.loading
# Load game modifications
Mod.init(self)
theme = Config.load(self.resource.fileName("theme.ini"))
Theme.open(theme)
# Make sure we are using the new upload URL
if self.config.get("game", "uploadurl").startswith("http://kempele.fi"):
self.config.set("game", "uploadurl", "http://fretsonfire.sourceforge.net/play")
self.addTask(self.audio, synchronized = False)
self.addTask(self.input, synchronized = False)
self.addTask(self.view)
self.addTask(self.resource, synchronized = False)
self.data = Data(self.resource, self.svg)
self.input.addKeyListener(FullScreenSwitcher(self), priority = True)
self.input.addSystemEventListener(SystemEventHandler(self))
self.debugLayer = None
self.startupLayer = None
self.loadingScreenShown = False
Log.debug("Ready.")
def setStartupLayer(self, startupLayer):
"""
Set the L{Layer} that will be shown when the all
the resources have been loaded. See L{Data}
@param startupLayer: Startup L{Layer}
"""
self.startupLayer = startupLayer
def isDebugModeEnabled(self):
return bool(self.debugLayer)
def setDebugModeEnabled(self, enabled):
"""
Show or hide the debug layer.
@type enabled: bool
"""
if enabled:
self.debugLayer = DebugLayer(self)
else:
self.debugLayer = None
def toggleFullscreen(self):
"""
Toggle between fullscreen and windowed mode.
@return: True on success
"""
if not self.video.toggleFullscreen():
# on windows, the fullscreen toggle kills our textures, so we must restart the whole game
self.input.broadcastSystemEvent("restartRequested")
self.config.set("video", "fullscreen", not self.video.fullscreen)
return True
self.config.set("video", "fullscreen", self.video.fullscreen)
return True
def restart(self):
"""Restart the game."""
if not self.restartRequested:
self.restartRequested = True
self.input.broadcastSystemEvent("restartRequested")
else:
# evilynux - With self.audio.close(), calling self.quit() results in
# a crash. Calling the parent directly as a workaround.
Engine.quit(self)
def quit(self):
self.audio.close()
Engine.quit(self)
def resizeScreen(self, width, height):
"""
Resize the game screen.
@param width: New width in pixels
@param height: New height in pixels
"""
self.view.setGeometry((0, 0, width, height))
self.svg.setGeometry((0, 0, width, height))
def isServerRunning(self):
return bool(self.server)
def startServer(self):
"""Start the game server."""
if not self.server:
Log.debug("Starting server.")
self.server = Server(self)
self.addTask(self.server, synchronized = False)
def connect(self, host):
"""
Connect to a game server.
@param host: Name of host to connect to
@return: L{Session} connected to remote server
"""
Log.debug("Connecting to host %s." % host)
session = ClientSession(self)
session.connect(host)
self.addTask(session, synchronized = False)
self.sessions.append(session)
return session
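# Hedged sketch (added for illustration): the hosting player starts the local
# server and then connects a client session to it; "localhost" is a
# placeholder host name.
#
#   engine.startServer()
#   session = engine.connect("localhost")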
def stopServer(self):
"""Stop the game server."""
if self.server:
Log.debug("Stopping server.")
self.removeTask(self.server)
self.server = None
def disconnect(self, session):
"""
Disconnect a L{Session}
@param session: L{Session} to disconnect
"""
if session in self.sessions:
Log.debug("Disconnecting.")
self.removeTask(session)
self.sessions.remove(session)
def loadSvgDrawing(self, target, name, fileName, textureSize = None):
"""
Load an SVG drawing synchronously.
@param target: An object that will own the drawing
@param name: The name of the attribute the drawing will be assigned to
@param fileName: The name of the file in the data directory
@param textureSize: Either None or (x, y), in which case the file will
be rendered to an x by y texture
@return: L{SvgDrawing} instance
"""
return self.data.loadSvgDrawing(target, name, fileName, textureSize)
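# Illustrative sketch (not part of the original class): loading a drawing for
# a layer synchronously. The attribute and file names are placeholders, and
# the draw() call is assumed to happen in the layer's render pass.
#
#   self.engine.loadSvgDrawing(self, "background", "background.svg")
#   ...
#   self.background.draw()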
def loading(self):
"""Loading state loop."""
done = Engine.run(self)
self.clearScreen()
if self.data.essentialResourcesLoaded():
if not self.loadingScreenShown:
self.loadingScreenShown = True
Dialogs.showLoadingScreen(self, self.data.resourcesLoaded)
if self.startupLayer:
self.view.pushLayer(self.startupLayer)
self.mainloop = self.main
self.view.render()
self.video.flip()
return done
def clearScreen(self):
self.svg.clear(*Theme.backgroundColor)
def main(self):
"""Main state loop."""
# Tune the scheduler priority so that transitions are as smooth as possible
if self.view.isTransitionInProgress():
self.boostBackgroundThreads(False)
else:
self.boostBackgroundThreads(True)
done = Engine.run(self)
self.clearScreen()
self.view.render()
if self.debugLayer:
self.debugLayer.render(1.0, True)
self.video.flip()
return done
def run(self):
try:
return self.mainloop()
except KeyboardInterrupt:
sys.exit(0)
except SystemExit:
sys.exit(0)
except Exception, e:
def clearMatrixStack(stack):
try:
glMatrixMode(stack)
for i in range(16):
glPopMatrix()
except:
pass
if self.handlingException:
# A recursive exception is fatal as we can't reliably reset the GL state
sys.exit(1)
self.handlingException = True
Log.error("%s: %s" % (e.__class__, e))
import traceback
traceback.print_exc()
clearMatrixStack(GL_PROJECTION)
clearMatrixStack(GL_MODELVIEW)
Dialogs.showMessage(self, unicode(e))
self.handlingException = False
return True
| fretsonfire/fof-python | src/GameEngine.py | Python | mit | 14,664 | 0.018344 |
def square_of_list(some_list, num):
if len(some_list) <= num:
result = 0
else:
result = some_list[num]**num
return result
def gap(some_list):
if any([type(x) == type('a') for x in some_list]):
print('There is a string in the list')
filtered_list = [ x for x in some_list if type(x) == type(1) or type(x) == type(1.0)]
return max(filtered_list) - min(filtered_list)
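# Worked example (added for illustration): strings are filtered out before
# the spread is computed.
#
#   gap([3, 'a', 10, 1.5])   # prints the notice, then returns 10 - 1.5 == 8.5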
| saintdragon2/python-3-lecture-2015 | homework_checker/civil_hw_personal_list/hw_civil_list_15030011.py | Python | mit | 441 | 0.026005 |
import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "particle_0 geometry" not in marker_sets:
s=new_marker_set('particle_0 geometry')
marker_sets["particle_0 geometry"]=s
s= marker_sets["particle_0 geometry"]
mark=s.place_marker((11373, 1728.13, 2526.72), (0.7, 0.7, 0.7), 890.203)
if "particle_1 geometry" not in marker_sets:
s=new_marker_set('particle_1 geometry')
marker_sets["particle_1 geometry"]=s
s= marker_sets["particle_1 geometry"]
mark=s.place_marker((10259.4, 2429.11, 3723.81), (0.7, 0.7, 0.7), 792.956)
if "particle_2 geometry" not in marker_sets:
s=new_marker_set('particle_2 geometry')
marker_sets["particle_2 geometry"]=s
s= marker_sets["particle_2 geometry"]
mark=s.place_marker((8532.69, 2758.2, 2960.41), (0.7, 0.7, 0.7), 856.786)
if "particle_3 geometry" not in marker_sets:
s=new_marker_set('particle_3 geometry')
marker_sets["particle_3 geometry"]=s
s= marker_sets["particle_3 geometry"]
mark=s.place_marker((9825.94, 1135.07, 1915.57), (0.7, 0.7, 0.7), 963.679)
if "particle_4 geometry" not in marker_sets:
s=new_marker_set('particle_4 geometry')
marker_sets["particle_4 geometry"]=s
s= marker_sets["particle_4 geometry"]
mark=s.place_marker((8864.47, 1070.22, 618.553), (0.7, 0.7, 0.7), 761.442)
if "particle_5 geometry" not in marker_sets:
s=new_marker_set('particle_5 geometry')
marker_sets["particle_5 geometry"]=s
s= marker_sets["particle_5 geometry"]
mark=s.place_marker((6980.33, 2671, 607.184), (0.7, 0.7, 0.7), 961.183)
if "particle_6 geometry" not in marker_sets:
s=new_marker_set('particle_6 geometry')
marker_sets["particle_6 geometry"]=s
s= marker_sets["particle_6 geometry"]
mark=s.place_marker((6195.69, 4157.38, 303.329), (0.7, 0.7, 0.7), 753.151)
if "particle_7 geometry" not in marker_sets:
s=new_marker_set('particle_7 geometry')
marker_sets["particle_7 geometry"]=s
s= marker_sets["particle_7 geometry"]
mark=s.place_marker((6609.67, 3545.04, -210.293), (1, 0.7, 0), 1098.07)
if "particle_8 geometry" not in marker_sets:
s=new_marker_set('particle_8 geometry')
marker_sets["particle_8 geometry"]=s
s= marker_sets["particle_8 geometry"]
mark=s.place_marker((5721.24, 5834.87, 871.259), (0.7, 0.7, 0.7), 1010.42)
if "particle_9 geometry" not in marker_sets:
s=new_marker_set('particle_9 geometry')
marker_sets["particle_9 geometry"]=s
s= marker_sets["particle_9 geometry"]
mark=s.place_marker((6201.03, 7254.05, 117.531), (1, 0.7, 0), 821.043)
if "particle_10 geometry" not in marker_sets:
s=new_marker_set('particle_10 geometry')
marker_sets["particle_10 geometry"]=s
s= marker_sets["particle_10 geometry"]
mark=s.place_marker((5645.43, 8268.96, 1609.79), (0.7, 0.7, 0.7), 873.876)
if "particle_11 geometry" not in marker_sets:
s=new_marker_set('particle_11 geometry')
marker_sets["particle_11 geometry"]=s
s= marker_sets["particle_11 geometry"]
mark=s.place_marker((6394.9, 7902.04, 2465.86), (0.7, 0.7, 0.7), 625.532)
if "particle_12 geometry" not in marker_sets:
s=new_marker_set('particle_12 geometry')
marker_sets["particle_12 geometry"]=s
s= marker_sets["particle_12 geometry"]
mark=s.place_marker((7011.82, 8106.65, 3867.87), (0.7, 0.7, 0.7), 880.474)
if "particle_13 geometry" not in marker_sets:
s=new_marker_set('particle_13 geometry')
marker_sets["particle_13 geometry"]=s
s= marker_sets["particle_13 geometry"]
mark=s.place_marker((8277.34, 7554.86, 3180), (0.7, 0.7, 0.7), 659.161)
if "particle_14 geometry" not in marker_sets:
s=new_marker_set('particle_14 geometry')
marker_sets["particle_14 geometry"]=s
s= marker_sets["particle_14 geometry"]
mark=s.place_marker((10001.2, 8459.06, 4337.94), (0.7, 0.7, 0.7), 831.745)
if "particle_15 geometry" not in marker_sets:
s=new_marker_set('particle_15 geometry')
marker_sets["particle_15 geometry"]=s
s= marker_sets["particle_15 geometry"]
mark=s.place_marker((11030.8, 7840.88, 7191.3), (0.7, 0.7, 0.7), 803.065)
if "particle_16 geometry" not in marker_sets:
s=new_marker_set('particle_16 geometry')
marker_sets["particle_16 geometry"]=s
s= marker_sets["particle_16 geometry"]
mark=s.place_marker((9790.37, 6419.19, 7692.39), (0.7, 0.7, 0.7), 610.262)
if "particle_17 geometry" not in marker_sets:
s=new_marker_set('particle_17 geometry')
marker_sets["particle_17 geometry"]=s
s= marker_sets["particle_17 geometry"]
mark=s.place_marker((8981.52, 7521.64, 7751.38), (0.7, 0.7, 0.7), 741.265)
if "particle_18 geometry" not in marker_sets:
s=new_marker_set('particle_18 geometry')
marker_sets["particle_18 geometry"]=s
s= marker_sets["particle_18 geometry"]
mark=s.place_marker((8220.56, 7937.02, 6324.34), (0.7, 0.7, 0.7), 748.625)
if "particle_19 geometry" not in marker_sets:
s=new_marker_set('particle_19 geometry')
marker_sets["particle_19 geometry"]=s
s= marker_sets["particle_19 geometry"]
mark=s.place_marker((7969.62, 9244.24, 5681.34), (0.7, 0.7, 0.7), 677.181)
if "particle_20 geometry" not in marker_sets:
s=new_marker_set('particle_20 geometry')
marker_sets["particle_20 geometry"]=s
s= marker_sets["particle_20 geometry"]
mark=s.place_marker((6422.64, 7890.12, 4392.43), (0.7, 0.7, 0.7), 616.015)
if "particle_21 geometry" not in marker_sets:
s=new_marker_set('particle_21 geometry')
marker_sets["particle_21 geometry"]=s
s= marker_sets["particle_21 geometry"]
mark=s.place_marker((7520.07, 8550.61, 5993.54), (0.7, 0.7, 0.7), 653.154)
if "particle_22 geometry" not in marker_sets:
s=new_marker_set('particle_22 geometry')
marker_sets["particle_22 geometry"]=s
s= marker_sets["particle_22 geometry"]
mark=s.place_marker((7047.03, 8185.55, 6463.58), (0.7, 0.7, 0.7), 595.33)
if "particle_23 geometry" not in marker_sets:
s=new_marker_set('particle_23 geometry')
marker_sets["particle_23 geometry"]=s
s= marker_sets["particle_23 geometry"]
mark=s.place_marker((7378.79, 7775.83, 7641.96), (0.7, 0.7, 0.7), 627.901)
if "particle_24 geometry" not in marker_sets:
s=new_marker_set('particle_24 geometry')
marker_sets["particle_24 geometry"]=s
s= marker_sets["particle_24 geometry"]
mark=s.place_marker((8530.81, 7008.83, 7696.77), (0.7, 0.7, 0.7), 663.941)
if "particle_25 geometry" not in marker_sets:
s=new_marker_set('particle_25 geometry')
marker_sets["particle_25 geometry"]=s
s= marker_sets["particle_25 geometry"]
mark=s.place_marker((9913.91, 6961.43, 8384.84), (0.7, 0.7, 0.7), 663.899)
if "particle_26 geometry" not in marker_sets:
s=new_marker_set('particle_26 geometry')
marker_sets["particle_26 geometry"]=s
s= marker_sets["particle_26 geometry"]
mark=s.place_marker((8860.24, 7431.45, 7335.88), (0.7, 0.7, 0.7), 644.694)
if "particle_27 geometry" not in marker_sets:
s=new_marker_set('particle_27 geometry')
marker_sets["particle_27 geometry"]=s
s= marker_sets["particle_27 geometry"]
mark=s.place_marker((8090.35, 6968.45, 5332.62), (0.7, 0.7, 0.7), 896.802)
if "particle_28 geometry" not in marker_sets:
s=new_marker_set('particle_28 geometry')
marker_sets["particle_28 geometry"]=s
s= marker_sets["particle_28 geometry"]
mark=s.place_marker((6851.21, 7706.77, 5771.66), (0.7, 0.7, 0.7), 576.38)
if "particle_29 geometry" not in marker_sets:
s=new_marker_set('particle_29 geometry')
marker_sets["particle_29 geometry"]=s
s= marker_sets["particle_29 geometry"]
mark=s.place_marker((5952.06, 8094.26, 4905.62), (0.7, 0.7, 0.7), 635.092)
if "particle_30 geometry" not in marker_sets:
s=new_marker_set('particle_30 geometry')
marker_sets["particle_30 geometry"]=s
s= marker_sets["particle_30 geometry"]
mark=s.place_marker((5799.93, 7527.38, 5476.16), (0.7, 0.7, 0.7), 651.505)
if "particle_31 geometry" not in marker_sets:
s=new_marker_set('particle_31 geometry')
marker_sets["particle_31 geometry"]=s
s= marker_sets["particle_31 geometry"]
mark=s.place_marker((6126, 7861.56, 3801.22), (0.7, 0.7, 0.7), 718.042)
if "particle_32 geometry" not in marker_sets:
s=new_marker_set('particle_32 geometry')
marker_sets["particle_32 geometry"]=s
s= marker_sets["particle_32 geometry"]
mark=s.place_marker((5802.45, 8900.88, 5188.73), (0.7, 0.7, 0.7), 726.714)
if "particle_33 geometry" not in marker_sets:
s=new_marker_set('particle_33 geometry')
marker_sets["particle_33 geometry"]=s
s= marker_sets["particle_33 geometry"]
mark=s.place_marker((6338.03, 7969.54, 6239.43), (0.7, 0.7, 0.7), 673.585)
if "particle_34 geometry" not in marker_sets:
s=new_marker_set('particle_34 geometry')
marker_sets["particle_34 geometry"]=s
s= marker_sets["particle_34 geometry"]
mark=s.place_marker((7091.35, 8316.54, 6768.38), (0.7, 0.7, 0.7), 598.418)
if "particle_35 geometry" not in marker_sets:
s=new_marker_set('particle_35 geometry')
marker_sets["particle_35 geometry"]=s
s= marker_sets["particle_35 geometry"]
mark=s.place_marker((8333.31, 8746.99, 7453.63), (0.7, 0.7, 0.7), 693.382)
if "particle_36 geometry" not in marker_sets:
s=new_marker_set('particle_36 geometry')
marker_sets["particle_36 geometry"]=s
s= marker_sets["particle_36 geometry"]
mark=s.place_marker((6988.8, 7884.05, 5598.72), (0.7, 0.7, 0.7), 804.038)
if "particle_37 geometry" not in marker_sets:
s=new_marker_set('particle_37 geometry')
marker_sets["particle_37 geometry"]=s
s= marker_sets["particle_37 geometry"]
mark=s.place_marker((7476.25, 8193.36, 7405.65), (0.7, 0.7, 0.7), 816.178)
if "particle_38 geometry" not in marker_sets:
s=new_marker_set('particle_38 geometry')
marker_sets["particle_38 geometry"]=s
s= marker_sets["particle_38 geometry"]
mark=s.place_marker((7390.96, 7143.03, 7047.55), (0.7, 0.7, 0.7), 776.628)
if "particle_39 geometry" not in marker_sets:
s=new_marker_set('particle_39 geometry')
marker_sets["particle_39 geometry"]=s
s= marker_sets["particle_39 geometry"]
mark=s.place_marker((6904.97, 8626.37, 7274.7), (0.7, 0.7, 0.7), 750.656)
if "particle_40 geometry" not in marker_sets:
s=new_marker_set('particle_40 geometry')
marker_sets["particle_40 geometry"]=s
s= marker_sets["particle_40 geometry"]
mark=s.place_marker((5613.73, 8187.52, 6292.66), (0.7, 0.7, 0.7), 709.625)
if "particle_41 geometry" not in marker_sets:
s=new_marker_set('particle_41 geometry')
marker_sets["particle_41 geometry"]=s
s= marker_sets["particle_41 geometry"]
mark=s.place_marker((4568.56, 9275.14, 5206.84), (0.7, 0.7, 0.7), 927.681)
if "particle_42 geometry" not in marker_sets:
s=new_marker_set('particle_42 geometry')
marker_sets["particle_42 geometry"]=s
s= marker_sets["particle_42 geometry"]
mark=s.place_marker((3976.98, 10301.6, 7563.46), (0.7, 0.7, 0.7), 1088.21)
if "particle_43 geometry" not in marker_sets:
s=new_marker_set('particle_43 geometry')
marker_sets["particle_43 geometry"]=s
s= marker_sets["particle_43 geometry"]
mark=s.place_marker((3836.02, 9880.54, 5747.76), (0.7, 0.7, 0.7), 736.147)
if "particle_44 geometry" not in marker_sets:
s=new_marker_set('particle_44 geometry')
marker_sets["particle_44 geometry"]=s
s= marker_sets["particle_44 geometry"]
mark=s.place_marker((5037.13, 8933.57, 6425.4), (0.7, 0.7, 0.7), 861.101)
if "particle_45 geometry" not in marker_sets:
s=new_marker_set('particle_45 geometry')
marker_sets["particle_45 geometry"]=s
s= marker_sets["particle_45 geometry"]
mark=s.place_marker((4504.93, 7641.84, 5010.54), (0.7, 0.7, 0.7), 924.213)
if "particle_46 geometry" not in marker_sets:
s=new_marker_set('particle_46 geometry')
marker_sets["particle_46 geometry"]=s
s= marker_sets["particle_46 geometry"]
mark=s.place_marker((3183.5, 6754.03, 6210.95), (0.7, 0.7, 0.7), 881.828)
if "particle_47 geometry" not in marker_sets:
s=new_marker_set('particle_47 geometry')
marker_sets["particle_47 geometry"]=s
s= marker_sets["particle_47 geometry"]
mark=s.place_marker((2216.87, 8382.44, 7025.17), (0.7, 0.7, 0.7), 927.681)
if "particle_48 geometry" not in marker_sets:
s=new_marker_set('particle_48 geometry')
marker_sets["particle_48 geometry"]=s
s= marker_sets["particle_48 geometry"]
mark=s.place_marker((2138.87, 6825.93, 5977.47), (0.7, 0.7, 0.7), 831.576)
if "particle_49 geometry" not in marker_sets:
s=new_marker_set('particle_49 geometry')
marker_sets["particle_49 geometry"]=s
s= marker_sets["particle_49 geometry"]
mark=s.place_marker((2829.52, 5877.95, 4507.91), (0.7, 0.7, 0.7), 859.494)
if "particle_50 geometry" not in marker_sets:
s=new_marker_set('particle_50 geometry')
marker_sets["particle_50 geometry"]=s
s= marker_sets["particle_50 geometry"]
mark=s.place_marker((1938.63, 6667.8, 4552.73), (0.7, 0.7, 0.7), 704.845)
if "particle_51 geometry" not in marker_sets:
s=new_marker_set('particle_51 geometry')
marker_sets["particle_51 geometry"]=s
s= marker_sets["particle_51 geometry"]
mark=s.place_marker((3329.23, 6425.84, 3600.9), (0.7, 0.7, 0.7), 804.461)
if "particle_52 geometry" not in marker_sets:
s=new_marker_set('particle_52 geometry')
marker_sets["particle_52 geometry"]=s
s= marker_sets["particle_52 geometry"]
mark=s.place_marker((4922.93, 5946.57, 2935.68), (0.7, 0.7, 0.7), 934.111)
if "particle_53 geometry" not in marker_sets:
s=new_marker_set('particle_53 geometry')
marker_sets["particle_53 geometry"]=s
s= marker_sets["particle_53 geometry"]
mark=s.place_marker((4403.72, 5955.71, 1475.72), (0.7, 0.7, 0.7), 988.339)
if "particle_54 geometry" not in marker_sets:
s=new_marker_set('particle_54 geometry')
marker_sets["particle_54 geometry"]=s
s= marker_sets["particle_54 geometry"]
mark=s.place_marker((3630.22, 6008.52, 1582.26), (1, 0.7, 0), 803.7)
if "particle_55 geometry" not in marker_sets:
s=new_marker_set('particle_55 geometry')
marker_sets["particle_55 geometry"]=s
s= marker_sets["particle_55 geometry"]
mark=s.place_marker((3480.44, 5567.3, 3624.98), (0.7, 0.7, 0.7), 812.118)
if "particle_56 geometry" not in marker_sets:
s=new_marker_set('particle_56 geometry')
marker_sets["particle_56 geometry"]=s
s= marker_sets["particle_56 geometry"]
mark=s.place_marker((3945.15, 3453.93, 3835.48), (0.7, 0.7, 0.7), 1177.93)
if "particle_57 geometry" not in marker_sets:
s=new_marker_set('particle_57 geometry')
marker_sets["particle_57 geometry"]=s
s= marker_sets["particle_57 geometry"]
mark=s.place_marker((3717.07, 1650.17, 5600.99), (0.7, 0.7, 0.7), 1038.21)
if "particle_58 geometry" not in marker_sets:
s=new_marker_set('particle_58 geometry')
marker_sets["particle_58 geometry"]=s
s= marker_sets["particle_58 geometry"]
mark=s.place_marker((3663.76, 1306.4, 6075.99), (1, 0.7, 0), 758.016)
if "particle_59 geometry" not in marker_sets:
s=new_marker_set('particle_59 geometry')
marker_sets["particle_59 geometry"]=s
s= marker_sets["particle_59 geometry"]
mark=s.place_marker((3990.02, 761.7, 5522.78), (0.7, 0.7, 0.7), 824.046)
if "particle_60 geometry" not in marker_sets:
s=new_marker_set('particle_60 geometry')
marker_sets["particle_60 geometry"]=s
s= marker_sets["particle_60 geometry"]
mark=s.place_marker((3399.52, 1631.45, 5369.95), (0.7, 0.7, 0.7), 793.379)
if "particle_61 geometry" not in marker_sets:
s=new_marker_set('particle_61 geometry')
marker_sets["particle_61 geometry"]=s
s= marker_sets["particle_61 geometry"]
mark=s.place_marker((2713.75, 1178.27, 5209.31), (0.7, 0.7, 0.7), 1011.56)
if "particle_62 geometry" not in marker_sets:
s=new_marker_set('particle_62 geometry')
marker_sets["particle_62 geometry"]=s
s= marker_sets["particle_62 geometry"]
mark=s.place_marker((3810.28, 2650, 4668.6), (0.7, 0.7, 0.7), 1097.01)
if "particle_63 geometry" not in marker_sets:
s=new_marker_set('particle_63 geometry')
marker_sets["particle_63 geometry"]=s
s= marker_sets["particle_63 geometry"]
mark=s.place_marker((2777.87, 1076.28, 4581.24), (0.7, 0.7, 0.7), 851.626)
if "particle_64 geometry" not in marker_sets:
s=new_marker_set('particle_64 geometry')
marker_sets["particle_64 geometry"]=s
s= marker_sets["particle_64 geometry"]
mark=s.place_marker((1873.05, -660.377, 5160.6), (0.7, 0.7, 0.7), 869.434)
if "particle_65 geometry" not in marker_sets:
s=new_marker_set('particle_65 geometry')
marker_sets["particle_65 geometry"]=s
s= marker_sets["particle_65 geometry"]
mark=s.place_marker((932.413, 883.584, 5086.19), (0.7, 0.7, 0.7), 818.463)
if "particle_66 geometry" not in marker_sets:
s=new_marker_set('particle_66 geometry')
marker_sets["particle_66 geometry"]=s
s= marker_sets["particle_66 geometry"]
mark=s.place_marker((784.351, 339.071, 6656.75), (0.7, 0.7, 0.7), 759.539)
if "particle_67 geometry" not in marker_sets:
s=new_marker_set('particle_67 geometry')
marker_sets["particle_67 geometry"]=s
s= marker_sets["particle_67 geometry"]
mark=s.place_marker((2119.43, 1753.82, 5176.13), (0.7, 0.7, 0.7), 1088.59)
if "particle_68 geometry" not in marker_sets:
s=new_marker_set('particle_68 geometry')
marker_sets["particle_68 geometry"]=s
s= marker_sets["particle_68 geometry"]
mark=s.place_marker((2093.33, -222.816, 5756.59), (0.7, 0.7, 0.7), 822.312)
if "particle_69 geometry" not in marker_sets:
s=new_marker_set('particle_69 geometry')
marker_sets["particle_69 geometry"]=s
s= marker_sets["particle_69 geometry"]
mark=s.place_marker((1527.28, -885.659, 6357.32), (0.7, 0.7, 0.7), 749.81)
if "particle_70 geometry" not in marker_sets:
s=new_marker_set('particle_70 geometry')
marker_sets["particle_70 geometry"]=s
s= marker_sets["particle_70 geometry"]
mark=s.place_marker((2286.52, -55.4312, 7428.72), (0.7, 0.7, 0.7), 764.488)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
| batxes/4Cin | SHH_WT_models/SHH_WT_models_final_output_0.1_-0.1_11000/SHH_WT_models11702.py | Python | gpl-3.0 | 17,574 | 0.025094 |
# test floating point floor divide and modulus
# it has some tricky corner cases
# pyboard has 32-bit floating point and gives different (but still
# correct) answers for certain combinations of divmod arguments.
def test(x, y):
div, mod = divmod(x, y)
print(div == x // y, mod == x % y, abs(div * y + mod - x) < 1e-6)
test(1.23456, 0.7)
test(-1.23456, 0.7)
test(1.23456, -0.7)
test(-1.23456, -0.7)
a = 1.23456
b = 0.7
test(a, b)
test(a, -b)
test(-a, b)
test(-a, -b)
for i in range(25):
x = (i - 12.5) / 6
for j in range(25):
y = (j - 12.5) / 6
test(x, y)
# test division by zero error
try:
divmod(1.0, 0)
except ZeroDivisionError:
print('ZeroDivisionError')
| martinribelotta/micropython | tests/float/float_divmod_relaxed.py | Python | mit | 705 | 0.001418 |
from p2pool.bitcoin import networks
PARENT = networks.nets['joulecoin']
SHARE_PERIOD = 20 # seconds
CHAIN_LENGTH = 12*60*60//10 # shares
REAL_CHAIN_LENGTH = 12*60*60//10 # shares
TARGET_LOOKBEHIND = 20 # shares
SPREAD = 10 # blocks
IDENTIFIER = 'ac556af4e900ca61'.decode('hex')
PREFIX = '16ac009e4fa655ac'.decode('hex')
P2P_PORT = 7844
MIN_TARGET = 0
MAX_TARGET = 2**256//2**32 - 1
PERSIST = False
WORKER_PORT = 9844
BOOTSTRAP_ADDRS = 'rav3n.dtdns.net pool.hostv.pl p2pool.org solidpool.org'.split(' ')
ANNOUNCE_CHANNEL = '#p2pool-alt'
VERSION_CHECK = lambda v: True
| Rav3nPL/p2pool-rav | p2pool/networks/joulecoin.py | Python | gpl-3.0 | 568 | 0.012324 |
# TODO: Setup
| mjkmoynihan/ReleaseRadar | setup.py | Python | apache-2.0 | 15 | 0.066667 |
import json
from datetime import datetime, timedelta
from flask import abort, Blueprint, current_app, jsonify, request
from functools import wraps
from sqlalchemy.exc import IntegrityError
from os import path
from uuid import UUID
from werkzeug.exceptions import default_exceptions # type: ignore
from db import db
from journalist_app import utils
from models import (Journalist, Reply, Source, Submission, RevokedToken,
LoginThrottledException, InvalidUsernameException,
BadTokenException, WrongPasswordException)
from store import NotEncrypted
TOKEN_EXPIRATION_MINS = 60 * 8
def get_user_object(request):
"""Helper function to use in token_required views that need a user
object
"""
auth_token = request.headers.get('Authorization').split(" ")[1]
user = Journalist.validate_api_token_and_get_user(auth_token)
return user
def token_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
try:
auth_header = request.headers['Authorization']
except KeyError:
return abort(403, 'API token not found in Authorization header.')
if auth_header:
split = auth_header.split(" ")
if len(split) != 2 or split[0] != 'Token':
abort(403, 'Malformed authorization header.')
auth_token = split[1]
else:
auth_token = ''
if not Journalist.validate_api_token_and_get_user(auth_token):
return abort(403, 'API token is invalid or expired.')
return f(*args, **kwargs)
return decorated_function
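
# The decorator above expects clients to authenticate with a header of the form
# (illustrative): Authorization: Token <api_token>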
def get_or_404(model, object_id, column=''):
if column:
result = model.query.filter(column == object_id).one_or_none()
else:
result = model.query.get(object_id)
if result is None:
abort(404)
return result
def make_blueprint(config):
api = Blueprint('api', __name__)
@api.route('/')
def get_endpoints():
endpoints = {'sources_url': '/api/v1/sources',
'current_user_url': '/api/v1/user',
'submissions_url': '/api/v1/submissions',
'replies_url': '/api/v1/replies',
'auth_token_url': '/api/v1/token'}
return jsonify(endpoints), 200
    # Before every POST request, validate the payload before it is processed
@api.before_request
def validate_data():
if request.method == 'POST':
# flag, star, and logout can have empty payloads
if not request.data:
dataless_endpoints = [
'add_star',
'remove_star',
'flag',
'logout',
]
for endpoint in dataless_endpoints:
if request.endpoint == 'api.' + endpoint:
return
return abort(400, 'malformed request')
# other requests must have valid JSON payload
else:
try:
json.loads(request.data.decode('utf-8'))
except (ValueError):
return abort(400, 'malformed request')
@api.route('/token', methods=['POST'])
def get_token():
creds = json.loads(request.data.decode('utf-8'))
username = creds.get('username', None)
passphrase = creds.get('passphrase', None)
one_time_code = creds.get('one_time_code', None)
if username is None:
return abort(400, 'username field is missing')
if passphrase is None:
return abort(400, 'passphrase field is missing')
if one_time_code is None:
return abort(400, 'one_time_code field is missing')
try:
journalist = Journalist.login(username, passphrase, one_time_code)
token_expiry = datetime.utcnow() + timedelta(
seconds=TOKEN_EXPIRATION_MINS * 60)
response = jsonify({
'token': journalist.generate_api_token(expiration=TOKEN_EXPIRATION_MINS * 60),
'expiration': token_expiry.isoformat() + 'Z',
'journalist_uuid': journalist.uuid,
})
# Update access metadata
journalist.last_access = datetime.utcnow()
db.session.add(journalist)
db.session.commit()
return response, 200
except (LoginThrottledException, InvalidUsernameException,
BadTokenException, WrongPasswordException):
return abort(403, 'Token authentication failed.')
@api.route('/sources', methods=['GET'])
@token_required
def get_all_sources():
sources = Source.query.filter_by(pending=False).all()
return jsonify(
{'sources': [source.to_json() for source in sources]}), 200
@api.route('/sources/<source_uuid>', methods=['GET', 'DELETE'])
@token_required
def single_source(source_uuid):
if request.method == 'GET':
source = get_or_404(Source, source_uuid, column=Source.uuid)
return jsonify(source.to_json()), 200
elif request.method == 'DELETE':
source = get_or_404(Source, source_uuid, column=Source.uuid)
utils.delete_collection(source.filesystem_id)
return jsonify({'message': 'Source and submissions deleted'}), 200
@api.route('/sources/<source_uuid>/add_star', methods=['POST'])
@token_required
def add_star(source_uuid):
source = get_or_404(Source, source_uuid, column=Source.uuid)
utils.make_star_true(source.filesystem_id)
db.session.commit()
return jsonify({'message': 'Star added'}), 201
@api.route('/sources/<source_uuid>/remove_star', methods=['DELETE'])
@token_required
def remove_star(source_uuid):
source = get_or_404(Source, source_uuid, column=Source.uuid)
utils.make_star_false(source.filesystem_id)
db.session.commit()
return jsonify({'message': 'Star removed'}), 200
@api.route('/sources/<source_uuid>/flag', methods=['POST'])
@token_required
def flag(source_uuid):
source = get_or_404(Source, source_uuid,
column=Source.uuid)
source.flagged = True
db.session.commit()
return jsonify({'message': 'Source flagged for reply'}), 200
@api.route('/sources/<source_uuid>/submissions', methods=['GET'])
@token_required
def all_source_submissions(source_uuid):
source = get_or_404(Source, source_uuid, column=Source.uuid)
return jsonify(
{'submissions': [submission.to_json() for
submission in source.submissions]}), 200
@api.route('/sources/<source_uuid>/submissions/<submission_uuid>/download', # noqa
methods=['GET'])
@token_required
def download_submission(source_uuid, submission_uuid):
get_or_404(Source, source_uuid, column=Source.uuid)
submission = get_or_404(Submission, submission_uuid,
column=Submission.uuid)
# Mark as downloaded
submission.downloaded = True
db.session.commit()
return utils.serve_file_with_etag(submission)
@api.route('/sources/<source_uuid>/replies/<reply_uuid>/download',
methods=['GET'])
@token_required
def download_reply(source_uuid, reply_uuid):
get_or_404(Source, source_uuid, column=Source.uuid)
reply = get_or_404(Reply, reply_uuid, column=Reply.uuid)
return utils.serve_file_with_etag(reply)
@api.route('/sources/<source_uuid>/submissions/<submission_uuid>',
methods=['GET', 'DELETE'])
@token_required
def single_submission(source_uuid, submission_uuid):
if request.method == 'GET':
source = get_or_404(Source, source_uuid, column=Source.uuid)
submission = get_or_404(Submission, submission_uuid,
column=Submission.uuid)
return jsonify(submission.to_json()), 200
elif request.method == 'DELETE':
submission = get_or_404(Submission, submission_uuid,
column=Submission.uuid)
source = get_or_404(Source, source_uuid, column=Source.uuid)
utils.delete_file(source.filesystem_id, submission.filename,
submission)
return jsonify({'message': 'Submission deleted'}), 200
@api.route('/sources/<source_uuid>/replies', methods=['GET', 'POST'])
@token_required
def all_source_replies(source_uuid):
if request.method == 'GET':
source = get_or_404(Source, source_uuid, column=Source.uuid)
return jsonify(
{'replies': [reply.to_json() for
reply in source.replies]}), 200
elif request.method == 'POST':
source = get_or_404(Source, source_uuid,
column=Source.uuid)
if request.json is None:
abort(400, 'please send requests in valid JSON')
if 'reply' not in request.json:
abort(400, 'reply not found in request body')
user = get_user_object(request)
data = request.json
if not data['reply']:
abort(400, 'reply should not be empty')
source.interaction_count += 1
try:
filename = current_app.storage.save_pre_encrypted_reply(
source.filesystem_id,
source.interaction_count,
source.journalist_filename,
data['reply'])
except NotEncrypted:
return jsonify(
{'message': 'You must encrypt replies client side'}), 400
# issue #3918
filename = path.basename(filename)
reply = Reply(user, source, filename)
reply_uuid = data.get('uuid', None)
if reply_uuid is not None:
                # check that it is parseable
try:
UUID(reply_uuid)
except ValueError:
abort(400, "'uuid' was not a valid UUID")
reply.uuid = reply_uuid
try:
db.session.add(reply)
db.session.add(source)
db.session.commit()
except IntegrityError as e:
db.session.rollback()
if 'UNIQUE constraint failed: replies.uuid' in str(e):
abort(409, 'That UUID is already in use.')
else:
raise e
return jsonify({'message': 'Your reply has been stored',
'uuid': reply.uuid,
'filename': reply.filename}), 201
@api.route('/sources/<source_uuid>/replies/<reply_uuid>',
methods=['GET', 'DELETE'])
@token_required
def single_reply(source_uuid, reply_uuid):
source = get_or_404(Source, source_uuid, column=Source.uuid)
reply = get_or_404(Reply, reply_uuid, column=Reply.uuid)
if request.method == 'GET':
return jsonify(reply.to_json()), 200
elif request.method == 'DELETE':
utils.delete_file(source.filesystem_id, reply.filename,
reply)
return jsonify({'message': 'Reply deleted'}), 200
@api.route('/submissions', methods=['GET'])
@token_required
def get_all_submissions():
submissions = Submission.query.all()
return jsonify({'submissions': [submission.to_json() for
submission in submissions]}), 200
@api.route('/replies', methods=['GET'])
@token_required
def get_all_replies():
replies = Reply.query.all()
return jsonify(
{'replies': [reply.to_json() for reply in replies]}), 200
@api.route('/user', methods=['GET'])
@token_required
def get_current_user():
user = get_user_object(request)
return jsonify(user.to_json()), 200
@api.route('/logout', methods=['POST'])
@token_required
def logout():
user = get_user_object(request)
auth_token = request.headers.get('Authorization').split(" ")[1]
revoked_token = RevokedToken(token=auth_token, journalist_id=user.id)
db.session.add(revoked_token)
db.session.commit()
return jsonify({'message': 'Your token has been revoked.'}), 200
def _handle_api_http_exception(error):
# Workaround for no blueprint-level 404/5 error handlers, see:
# https://github.com/pallets/flask/issues/503#issuecomment-71383286
response = jsonify({'error': error.name,
'message': error.description})
return response, error.code
for code in default_exceptions:
api.errorhandler(code)(_handle_api_http_exception)
return api
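

# Usage sketch (hypothetical wiring, not part of this module): the blueprint
# returned by make_blueprint() is registered on the journalist Flask app under
# an API prefix, roughly like:
#
#     app = Flask(__name__)
#     app.register_blueprint(make_blueprint(config), url_prefix='/api/v1')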
| ehartsuyker/securedrop | securedrop/journalist_app/api.py | Python | agpl-3.0 | 13,022 | 0.000077 |
import time
import RPi.GPIO as GPIO
# use Broadcom pin numbers
GPIO.setmode(GPIO.BCM)
SERVO_PIN = 3
GPIO.setup(SERVO_PIN, GPIO.OUT)
# setup PWM
pwm = GPIO.PWM(SERVO_PIN, 100)
pwm.start(5)
for i in range(5, 25):
pwm.ChangeDutyCycle(i)
time.sleep(0.5)
pwm.stop()
GPIO.cleanup()
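
# Note (general servo background, not specific to this particular servo): at the
# 100 Hz PWM frequency set above, duty cycles of roughly 5-25% correspond to pulse
# widths of about 0.5-2.5 ms, the range most hobby servos map onto their travel,
# so the loop sweeps the servo across its range in 0.5 s steps.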
| lukaszo/rpitips-examples | RPi.GPIO/servo.py | Python | apache-2.0 | 288 | 0.010417 |
"""
Tests for OEShape color utilities.
"""
import numpy as np
import unittest
from openeye.oechem import *
from openeye.oeshape import *
from ..color import ColorForceField
class TestColorForceField(unittest.TestCase):
"""
Tests for ColorForceField.
"""
def setUp(self):
"""
Set up tests.
"""
self.color_ff = ColorForceField()
self.color_ff.Init(OEColorFFType_ImplicitMillsDean)
def test_get_interactions(self):
"""
Test ColorForceField.get_interactions.
"""
interactions = self.color_ff.get_interactions()
assert len(interactions) == 6
for (a_type, b_type, decay, weight, radius) in interactions:
assert a_type == b_type
assert decay == 'gaussian'
assert weight < 0
assert radius > 0
def test_get_string(self):
"""
Test ColorForceField.get_string.
"""
ifs = oeisstream(self.color_ff.get_string())
color_ff = ColorForceField()
color_ff.Init(ifs)
for a_interaction, b_interaction in zip(
color_ff.get_interactions(), self.color_ff.get_interactions()):
assert np.array_equal(a_interaction, b_interaction)
def test_isolate_interactions(self):
"""
Test ColorForceField.isolate_interactions.
"""
interactions = set()
for color_ff in self.color_ff.isolate_interactions():
assert len(color_ff.get_interactions()) == 1
for interaction in color_ff.get_interactions():
interactions.add(interaction)
assert interactions == set(self.color_ff.get_interactions())
| skearnes/color-features | oe_utils/shape/tests/test_color.py | Python | bsd-3-clause | 1,693 | 0 |
from django.test import TestCase
from django.core.urlresolvers import reverse
from working_waterfronts.working_waterfronts_api.models import Video
from django.contrib.auth.models import User
class EditVideoTestCase(TestCase):
"""
Test that the Edit Video page works as expected.
Things tested:
URLs reverse correctly
        The rendered page has the correct form fields
POSTing "correct" data will result in the update of the video
object with the specified ID
"""
fixtures = ['test_fixtures']
def setUp(self):
user = User.objects.create_user(
'temporary', 'temporary@gmail.com', 'temporary')
user.save()
response = self.client.login(
username='temporary', password='temporary')
self.assertEqual(response, True)
def test_not_logged_in(self):
self.client.logout()
response = self.client.get(
reverse('edit-video', kwargs={'id': '1'}))
self.assertRedirects(response, '/login?next=/entry/videos/1')
def test_url_endpoint(self):
url = reverse('edit-video', kwargs={'id': '1'})
self.assertEqual(url, '/entry/videos/1')
def test_successful_video_update(self):
"""
POST a proper "update video" command to the server, and see if
the update appears in the database
"""
        # Data that we'll POST to the server to update the existing video
new_video = {
'caption': "A thrilling display of utmost might",
'name': "You won't believe number 3!",
'video': 'http://www.youtube.com/watch?v=dQw4w9WgXcQ'}
self.client.post(
reverse('edit-video', kwargs={'id': '1'}),
new_video)
video = Video.objects.get(id=1)
for field in new_video:
self.assertEqual(
getattr(video, field), new_video[field])
def test_form_fields(self):
"""
Tests to see if the form contains all of the right fields
"""
response = self.client.get(
reverse('edit-video', kwargs={'id': '1'}))
fields = {
'name': 'A Starship',
'caption': "Traveling at the speed of light!",
'video': 'http://www.youtube.com/watch?v=efgDdSWDg0g'
}
form = response.context['video_form']
for field in fields:
self.assertEqual(fields[field], form[field].value())
def test_delete_video(self):
"""
        Tests that a DELETE to entry/videos/<id> deletes the item
"""
response = self.client.delete(
reverse('edit-video', kwargs={'id': '2'}))
self.assertEqual(response.status_code, 200)
with self.assertRaises(Video.DoesNotExist):
Video.objects.get(id=2)
response = self.client.delete(
reverse('edit-video', kwargs={'id': '2'}))
self.assertEqual(response.status_code, 404)
| osu-cass/working-waterfronts-api | working_waterfronts/working_waterfronts_api/tests/views/entry/test_edit_video.py | Python | apache-2.0 | 2,965 | 0 |
INF = 2 ** 31 - 1  # assumed marker for "empty room" (Walls and Gates convention); bfs() compares against it


class WallsGate(object):
def dfs(self, rooms):
queue = [(i, j, 0) for i, rows in enumerate(rooms) for j, v in enumerate(rows) if not v]
while queue:
i, j, step = queue.pop()
if rooms[i][j] > step:
rooms[i][j] = step
for newi, newj in ((i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1)):
if 0 <= newi < len(rooms) and 0 <= newj < len(rooms[0]) and step < rooms[newi][newj]:
queue.append((newi, newj, step + 1))
def bfs(self, rooms):
row=len(rooms)
col=len(rooms[0])
queue=[]
for i in xrange(row):
for j in xrange(col):
if rooms[i][j]==0:
queue.append(i*col+j)
while queue:
x=queue.pop(0)
i,j=x/col,x%col
for newi,newj in (i+1,j),(i-1,j),(i,j+1),(i,j-1):
if 0 <= newi < len(rooms) and 0 <= newj < len(rooms[0]) and rooms[newi][newj]==INF:
rooms[newi][newj]=rooms[i][j]+1
queue.append(newi*col+newj)
def naivedfs(self, rooms):
for i in xrange(len(rooms)):
for j in xrange(len(rooms[0])):
if rooms[i][j]==0:
self._dfsrev(rooms,i,j)
    def _dfsrev(self,rooms,i,j):
        for newi,newj in (i+1,j),(i-1,j),(i,j+1),(i,j-1):
            if 0 <= newi < len(rooms) and 0 <= newj < len(rooms[0]) and rooms[newi][newj]>rooms[i][j]+1:
                rooms[newi][newj]=rooms[i][j]+1
                # recurse from the updated neighbour (the original recursed with newi twice
                # and used a comparison that overwrote walls and closer cells)
                self._dfsrev(rooms,newi,newj)
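

# Minimal usage sketch (hypothetical grid; -1 = wall, 0 = gate, INF = empty room).
if __name__ == '__main__':
    grid = [[INF, -1, 0, INF],
            [INF, INF, INF, -1],
            [INF, -1, INF, -1],
            [0, -1, INF, INF]]
    WallsGate().bfs(grid)
    print(grid)  # every reachable empty room now holds its distance to the nearest gate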
| Tanych/CodeTracking | distance.py | Python | mit | 1,572 | 0.027354 |
# Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Builds Kubeflow Pipelines component and pipeline for sentiment analysis."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import kfp.compiler
import kfp.dsl
import kfp.gcp
class SentimentAnalysisOp(kfp.dsl.ContainerOp):
"""Defines the operation."""
def __init__(self,
name,
project,
gcp_temp_location,
input_path,
output_path,
window,
period):
super(SentimentAnalysisOp, self).__init__(
name=name,
image='gcr.io/rostam-193618/sentiment-analysis:latest',
command=[
'mvn', 'compile', 'exec:java',
'-Dexec.mainClass=com.google.cloud.pso.pipelines.SentimentAnalysis',
'-Dexec.cleanupDaemonThreads=false'
],
# file_outputs={'blobs': '/blobs.txt'},
arguments=[
'-Dexec.args=--project={} \
--runner=DataflowRunner \
--gcpTempLocation={} \
--inputPath={} \
--outputPath={} \
--windowDuration={} \
--windowPeriod={}'.format(
str(project),
str(gcp_temp_location),
str(input_path),
str(output_path),
str(window),
str(period)),
]
)
@kfp.dsl.pipeline(
name='Sentiment analysis',
description='Analyzes the sentiments of NYTimes front page headlines.'
)
def pipeline_func(
project=kfp.dsl.PipelineParam('project', value='<PROJECT_ID>'),
    gcp_temp_location=kfp.dsl.PipelineParam(
        'gcp_temp_location', value='gs://<BUCKET_NAME>/tmp'),
    input_path=kfp.dsl.PipelineParam(
        'input_path', value='gs://<BUCKET_NAME>/<NYTIMES-ARCHIVE-API-JSON-FILE(S)>'),
    output_path=kfp.dsl.PipelineParam(
        'output_path', value='gs://<BUCKET_NAME>/output/output'),
window=kfp.dsl.PipelineParam('window', value=280),
period=kfp.dsl.PipelineParam('period', value=1)):
"""Defines the pipeline."""
sentiment_analysis_task = SentimentAnalysisOp(
'SentimentAnalysis',
project, # To authenticate.
gcp_temp_location,
input_path,
output_path,
window, period).apply(
kfp.gcp.use_gcp_secret()) # To apply gcp service account secret.
if __name__ == '__main__':
"""Compiles the pipeline to a file."""
filename = 'sentiment_analysis{dt:%Y%m%d_%H%M%S}.pipeline.tar.gz'.format(
dt=datetime.datetime.now())
filepath = './{}'.format(filename)
kfp.compiler.Compiler().compile(pipeline_func, filepath)
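  # The compiled archive can then be uploaded through the Kubeflow Pipelines UI, or
  # programmatically (sketch; the host URL is a placeholder/assumption):
  #
  #     client = kfp.Client(host='<KFP_HOST>')
  #     client.upload_pipeline(filepath, pipeline_name='sentiment-analysis')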
| GoogleCloudPlatform/professional-services | examples/kubeflow-pipelines-sentiment-analysis/pipeline/build_sentiment_analysis.py | Python | apache-2.0 | 3,173 | 0.003467 |
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Count statistics about SCons test and source files. This must be run
# against a fully-populated tree (for example, one that's been freshly
# checked out).
#
# A test file is anything under the src/ directory that begins with
# 'test_' or ends in 'Tests.py', or anything under the test/ directory
# that ends in '.py'. Note that runtest.py script does *not*, by default,
# consider the files that begin with 'test_' to be tests, because they're
# tests of SCons packaging and installation, not functional tests of
# SCons code.
#
# A source file is anything under the src/engine/ or src/script/
# directories that ends in '.py' but does NOT begin with 'test_'
# or end in 'Tests.py'.
#
# We report the number of tests and sources, the total number of lines
# in each category, the number of non-blank lines, and the number of
# non-comment lines. The last figure (non-comment) lines is the most
# interesting one for most purposes.
from __future__ import division, print_function
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import os.path
fmt = "%-16s %5s %7s %9s %11s %11s"
class Collection(object):
def __init__(self, name, files=None, pred=None):
self._name = name
if files is None:
files = []
self.files = files
if pred is None:
pred = lambda x: True
self.pred = pred
def __call__(self, fname):
return self.pred(fname)
def __len__(self):
return len(self.files)
def collect(self, directory):
for dirpath, dirnames, filenames in os.walk(directory):
try: dirnames.remove('.svn')
except ValueError: pass
self.files.extend([ os.path.join(dirpath, f)
for f in filenames if self.pred(f) ])
def lines(self):
try:
return self._lines
except AttributeError:
self._lines = lines = []
            for fname in self.files:
                with open(fname) as f:
                    lines.extend([s.lstrip() for s in f.readlines()])
return lines
def non_blank(self):
return [s for s in self.lines() if s != '']
def non_comment(self):
return [s for s in self.lines() if s == '' or s[0] != '#']
def non_blank_non_comment(self):
return [s for s in self.lines() if s != '' and s[0] != '#']
def printables(self):
return (self._name + ':',
len(self.files),
len(self.lines()),
len(self.non_blank()),
len(self.non_comment()),
len(self.non_blank_non_comment()))
def is_Tests_py(x):
return x[-8:] == 'Tests.py'
def is_test_(x):
return x[:5] == 'test_'
def is_python(x):
return x[-3:] == '.py'
def is_source(x):
return is_python(x) and not is_Tests_py(x) and not is_test_(x)
src_Tests_py_tests = Collection('src/ *Tests.py', pred=is_Tests_py)
src_test_tests = Collection('src/ test_*.py', pred=is_test_)
test_tests = Collection('test/ tests', pred=is_python)
sources = Collection('sources', pred=is_source)
src_Tests_py_tests.collect('src')
src_test_tests.collect('src')
test_tests.collect('test')
sources.collect('src/engine')
sources.collect('src/script')
src_tests = Collection('src/ tests', src_Tests_py_tests.files
+ src_test_tests.files)
all_tests = Collection('all tests', src_tests.files + test_tests.files)
def ratio(over, under):
return "%.2f" % (float(len(over)) / float(len(under)))
print(fmt % ('', '', '', '', '', 'non-blank'))
print(fmt % ('', 'files', 'lines', 'non-blank', 'non-comment', 'non-comment'))
print()
print(fmt % src_Tests_py_tests.printables())
print(fmt % src_test_tests.printables())
print()
print(fmt % src_tests.printables())
print(fmt % test_tests.printables())
print()
print(fmt % all_tests.printables())
print(fmt % sources.printables())
print()
print(fmt % ('ratio:',
ratio(all_tests, sources),
ratio(all_tests.lines(), sources.lines()),
ratio(all_tests.non_blank(), sources.non_blank()),
ratio(all_tests.non_comment(), sources.non_comment()),
ratio(all_tests.non_blank_non_comment(),
sources.non_blank_non_comment())
))
| timj/scons | bin/linecount.py | Python | mit | 4,164 | 0.010327 |
from numpy.linalg import cholesky
from scipy.spatial.distance import euclidean
from sklearn.base import BaseEstimator
from sklearn.utils.validation import _is_arraylike
from sklearn.metrics import roc_auc_score
import numpy as np
from abc import ABCMeta, abstractmethod
import six
from ._util import ArrayIndexer, check_input, validate_vector
import warnings
class BaseMetricLearner(six.with_metaclass(ABCMeta, BaseEstimator)):
def __init__(self, preprocessor=None):
"""
Parameters
----------
preprocessor : array-like, shape=(n_samples, n_features) or callable
The preprocessor to call to get tuples from indices. If array-like,
tuples will be gotten like this: X[indices].
"""
self.preprocessor = preprocessor
@abstractmethod
def score_pairs(self, pairs):
"""Returns the score between pairs
(can be a similarity, or a distance/metric depending on the algorithm)
Parameters
----------
pairs : `numpy.ndarray`, shape=(n_samples, 2, n_features)
3D array of pairs.
Returns
-------
scores: `numpy.ndarray` of shape=(n_pairs,)
The score of every pair.
See Also
--------
get_metric : a method that returns a function to compute the metric between
two points. The difference with `score_pairs` is that it works on two 1D
arrays and cannot use a preprocessor. Besides, the returned function is
independent of the metric learner and hence is not modified if the metric
learner is.
"""
def check_preprocessor(self):
"""Initializes the preprocessor"""
if _is_arraylike(self.preprocessor):
self.preprocessor_ = ArrayIndexer(self.preprocessor)
elif callable(self.preprocessor) or self.preprocessor is None:
self.preprocessor_ = self.preprocessor
else:
raise ValueError("Invalid type for the preprocessor: {}. You should "
"provide either None, an array-like object, "
"or a callable.".format(type(self.preprocessor)))
def _prepare_inputs(self, X, y=None, type_of_inputs='classic',
**kwargs):
"""Initializes the preprocessor and processes inputs. See `check_input`
for more details.
Parameters
----------
input: array-like
The input data array to check.
y : array-like
The input labels array to check.
type_of_inputs: `str` {'classic', 'tuples'}
The type of inputs to check. If 'classic', the input should be
a 2D array-like of points or a 1D array like of indicators of points. If
'tuples', the input should be a 3D array-like of tuples or a 2D
array-like of indicators of tuples.
**kwargs: dict
Arguments to pass to check_input.
Returns
-------
X : `numpy.ndarray`
The checked input data array.
y: `numpy.ndarray` (optional)
The checked input labels array.
"""
self.check_preprocessor()
return check_input(X, y,
type_of_inputs=type_of_inputs,
preprocessor=self.preprocessor_,
estimator=self,
tuple_size=getattr(self, '_tuple_size', None),
**kwargs)
@abstractmethod
def get_metric(self):
"""Returns a function that takes as input two 1D arrays and outputs the
learned metric score on these two points.
This function will be independent from the metric learner that learned it
(it will not be modified if the initial metric learner is modified),
and it can be directly plugged into the `metric` argument of
scikit-learn's estimators.
Returns
-------
metric_fun : function
The function described above.
Examples
--------
.. doctest::
>>> from metric_learn import NCA
>>> from sklearn.datasets import make_classification
>>> from sklearn.neighbors import KNeighborsClassifier
>>> nca = NCA()
>>> X, y = make_classification()
>>> nca.fit(X, y)
>>> knn = KNeighborsClassifier(metric=nca.get_metric())
>>> knn.fit(X, y) # doctest: +NORMALIZE_WHITESPACE
KNeighborsClassifier(algorithm='auto', leaf_size=30,
metric=<function MahalanobisMixin.get_metric.<locals>.metric_fun
at 0x...>,
metric_params=None, n_jobs=None, n_neighbors=5, p=2,
weights='uniform')
See Also
--------
score_pairs : a method that returns the metric score between several pairs
of points. Unlike `get_metric`, this is a method of the metric learner
and therefore can change if the metric learner changes. Besides, it can
use the metric learner's preprocessor, and works on concatenated arrays.
"""
class MetricTransformer(six.with_metaclass(ABCMeta)):
@abstractmethod
def transform(self, X):
"""Applies the metric transformation.
Parameters
----------
X : (n x d) matrix
Data to transform.
Returns
-------
transformed : (n x d) matrix
Input data transformed to the metric space by :math:`XL^{\\top}`
"""
class MahalanobisMixin(six.with_metaclass(ABCMeta, BaseMetricLearner,
MetricTransformer)):
"""Mahalanobis metric learning algorithms.
Algorithm that learns a Mahalanobis (pseudo) distance :math:`d_M(x, x')`,
defined between two column vectors :math:`x` and :math:`x'` by: :math:`d_M(x,
x') = \sqrt{(x-x')^T M (x-x')}`, where :math:`M` is a learned symmetric
positive semi-definite (PSD) matrix. The metric between points can then be
expressed as the euclidean distance between points embedded in a new space
through a linear transformation. Indeed, the above matrix can be decomposed
into the product of two transpose matrices (through SVD or Cholesky
decomposition): :math:`d_M(x, x')^2 = (x-x')^T M (x-x') = (x-x')^T L^T L
(x-x') = (L x - L x')^T (L x- L x')`
Attributes
----------
transformer_ : `numpy.ndarray`, shape=(num_dims, n_features)
The learned linear transformation ``L``.
"""
def score_pairs(self, pairs):
"""Returns the learned Mahalanobis distance between pairs.
This distance is defined as: :math:`d_M(x, x') = \sqrt{(x-x')^T M (x-x')}`
where ``M`` is the learned Mahalanobis matrix, for every pair of points
``x`` and ``x'``. This corresponds to the euclidean distance between
embeddings of the points in a new space, obtained through a linear
transformation. Indeed, we have also: :math:`d_M(x, x') = \sqrt{(x_e -
x_e')^T (x_e- x_e')}`, with :math:`x_e = L x` (See
:class:`MahalanobisMixin`).
Parameters
----------
pairs : array-like, shape=(n_pairs, 2, n_features) or (n_pairs, 2)
3D Array of pairs to score, with each row corresponding to two points,
      or 2D array of indices of pairs if the metric learner uses a
preprocessor.
Returns
-------
scores: `numpy.ndarray` of shape=(n_pairs,)
The learned Mahalanobis distance for every pair.
See Also
--------
get_metric : a method that returns a function to compute the metric between
two points. The difference with `score_pairs` is that it works on two 1D
arrays and cannot use a preprocessor. Besides, the returned function is
independent of the metric learner and hence is not modified if the metric
learner is.
:ref:`mahalanobis_distances` : The section of the project documentation
that describes Mahalanobis Distances.
"""
pairs = check_input(pairs, type_of_inputs='tuples',
preprocessor=self.preprocessor_,
estimator=self, tuple_size=2)
pairwise_diffs = self.transform(pairs[:, 1, :] - pairs[:, 0, :])
# (for MahalanobisMixin, the embedding is linear so we can just embed the
# difference)
return np.sqrt(np.sum(pairwise_diffs**2, axis=-1))
def transform(self, X):
"""Embeds data points in the learned linear embedding space.
Transforms samples in ``X`` into ``X_embedded``, samples inside a new
embedding space such that: ``X_embedded = X.dot(L.T)``, where ``L`` is
the learned linear transformation (See :class:`MahalanobisMixin`).
Parameters
----------
X : `numpy.ndarray`, shape=(n_samples, n_features)
The data points to embed.
Returns
-------
X_embedded : `numpy.ndarray`, shape=(n_samples, num_dims)
The embedded data points.
"""
X_checked = check_input(X, type_of_inputs='classic', estimator=self,
preprocessor=self.preprocessor_,
accept_sparse=True)
return X_checked.dot(self.transformer_.T)
def get_metric(self):
transformer_T = self.transformer_.T.copy()
def metric_fun(u, v, squared=False):
"""This function computes the metric between u and v, according to the
previously learned metric.
Parameters
----------
u : array-like, shape=(n_features,)
The first point involved in the distance computation.
v : array-like, shape=(n_features,)
The second point involved in the distance computation.
squared : `bool`
If True, the function will return the squared metric between u and
v, which is faster to compute.
Returns
-------
distance: float
The distance between u and v according to the new metric.
"""
u = validate_vector(u)
v = validate_vector(v)
transformed_diff = (u - v).dot(transformer_T)
dist = np.dot(transformed_diff, transformed_diff.T)
if not squared:
dist = np.sqrt(dist)
return dist
return metric_fun
get_metric.__doc__ = BaseMetricLearner.get_metric.__doc__
def metric(self):
# TODO: remove this method in version 0.6.0
warnings.warn(("`metric` is deprecated since version 0.5.0 and will be "
"removed in 0.6.0. Use `get_mahalanobis_matrix` instead."),
DeprecationWarning)
return self.get_mahalanobis_matrix()
def get_mahalanobis_matrix(self):
"""Returns a copy of the Mahalanobis matrix learned by the metric learner.
Returns
-------
    M : `numpy.ndarray`, shape=(n_features, n_features)
The copy of the learned Mahalanobis matrix.
"""
return self.transformer_.T.dot(self.transformer_)
class _PairsClassifierMixin(BaseMetricLearner):
_tuple_size = 2 # number of points in a tuple, 2 for pairs
def predict(self, pairs):
"""Predicts the learned metric between input pairs. (For now it just
calls decision function).
Returns the learned metric value between samples in every pair. It should
ideally be low for similar samples and high for dissimilar samples.
Parameters
----------
pairs : array-like, shape=(n_pairs, 2, n_features) or (n_pairs, 2)
3D Array of pairs to predict, with each row corresponding to two
points, or 2D array of indices of pairs if the metric learner uses a
preprocessor.
Returns
-------
y_predicted : `numpy.ndarray` of floats, shape=(n_constraints,)
The predicted learned metric value between samples in every pair.
"""
return self.decision_function(pairs)
def decision_function(self, pairs):
"""Returns the learned metric between input pairs.
Returns the learned metric value between samples in every pair. It should
ideally be low for similar samples and high for dissimilar samples.
Parameters
----------
pairs : array-like, shape=(n_pairs, 2, n_features) or (n_pairs, 2)
3D Array of pairs to predict, with each row corresponding to two
points, or 2D array of indices of pairs if the metric learner uses a
preprocessor.
Returns
-------
y_predicted : `numpy.ndarray` of floats, shape=(n_constraints,)
The predicted learned metric value between samples in every pair.
"""
pairs = check_input(pairs, type_of_inputs='tuples',
preprocessor=self.preprocessor_,
estimator=self, tuple_size=self._tuple_size)
return self.score_pairs(pairs)
def score(self, pairs, y):
"""Computes score of pairs similarity prediction.
Returns the ``roc_auc`` score of the fitted metric learner. It is
computed in the following way: for every value of a threshold
``t`` we classify all pairs of samples where the predicted distance is
inferior to ``t`` as belonging to the "similar" class, and the other as
belonging to the "dissimilar" class, and we count false positive and
true positives as in a classical ``roc_auc`` curve.
Parameters
----------
pairs : array-like, shape=(n_pairs, 2, n_features) or (n_pairs, 2)
3D Array of pairs, with each row corresponding to two points,
or 2D array of indices of pairs if the metric learner uses a
preprocessor.
y : array-like, shape=(n_constraints,)
The corresponding labels.
Returns
-------
score : float
The ``roc_auc`` score.
"""
return roc_auc_score(y, self.decision_function(pairs))
class _QuadrupletsClassifierMixin(BaseMetricLearner):
_tuple_size = 4 # number of points in a tuple, 4 for quadruplets
def predict(self, quadruplets):
"""Predicts the ordering between sample distances in input quadruplets.
For each quadruplet, returns 1 if the quadruplet is in the right order (
first pair is more similar than second pair), and -1 if not.
Parameters
----------
quadruplets : array-like, shape=(n_quadruplets, 4, n_features) or
(n_quadruplets, 4)
3D Array of quadruplets to predict, with each row corresponding to four
points, or 2D array of indices of quadruplets if the metric learner
uses a preprocessor.
Returns
-------
prediction : `numpy.ndarray` of floats, shape=(n_constraints,)
Predictions of the ordering of pairs, for each quadruplet.
"""
quadruplets = check_input(quadruplets, type_of_inputs='tuples',
preprocessor=self.preprocessor_,
estimator=self, tuple_size=self._tuple_size)
return np.sign(self.decision_function(quadruplets))
def decision_function(self, quadruplets):
"""Predicts differences between sample distances in input quadruplets.
For each quadruplet of samples, computes the difference between the learned
metric of the first pair minus the learned metric of the second pair.
Parameters
----------
quadruplets : array-like, shape=(n_quadruplets, 4, n_features) or
(n_quadruplets, 4)
3D Array of quadruplets to predict, with each row corresponding to four
points, or 2D array of indices of quadruplets if the metric learner
uses a preprocessor.
Returns
-------
decision_function : `numpy.ndarray` of floats, shape=(n_constraints,)
Metric differences.
"""
return (self.score_pairs(quadruplets[:, :2]) -
self.score_pairs(quadruplets[:, 2:]))
def score(self, quadruplets, y=None):
"""Computes score on input quadruplets
Returns the accuracy score of the following classification task: a record
is correctly classified if the predicted similarity between the first two
samples is higher than that of the last two.
Parameters
----------
quadruplets : array-like, shape=(n_quadruplets, 4, n_features) or
(n_quadruplets, 4)
3D Array of quadruplets to score, with each row corresponding to four
points, or 2D array of indices of quadruplets if the metric learner
uses a preprocessor.
y : Ignored, for scikit-learn compatibility.
Returns
-------
score : float
The quadruplets score.
"""
return -np.mean(self.predict(quadruplets))
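

# Illustrative numerical check of the identity used throughout this module
# (not part of the public API; `learner` stands for any fitted MahalanobisMixin,
# and x, y for arbitrary feature vectors):
#
#     L = learner.transformer_
#     M = learner.get_mahalanobis_matrix()        # equals L.T.dot(L)
#     diff = x - y
#     d2 = diff.dot(M).dot(diff)                  # squared d_M(x, y)
#     np.isclose(d2, np.sum(L.dot(diff) ** 2))    # True: the metric is Euclidean in the L-embedding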
| all-umass/metric-learn | metric_learn/base_metric.py | Python | mit | 15,825 | 0.002528 |
# Tests specific to the dask class
import os
from numpy.core.shape_base import block
import pytest
import numpy as np
from mock import patch
from numpy.testing import assert_allclose
from astropy.tests.helper import assert_quantity_allclose
from astropy import units as u
from astropy.utils import data
try:
from distributed.utils_test import client, loop, cluster_fixture # noqa
DISTRIBUTED_INSTALLED = True
except ImportError:
DISTRIBUTED_INSTALLED = False
from spectral_cube import DaskSpectralCube, SpectralCube, DaskVaryingResolutionSpectralCube
from .test_casafuncs import make_casa_testimage
try:
import casatools
from casatools import image
CASA_INSTALLED = True
except ImportError:
try:
from taskinit import ia as image
CASA_INSTALLED = True
except ImportError:
CASA_INSTALLED = False
DATA = os.path.join(os.path.dirname(__file__), 'data')
class Array:
args = None
kwargs = None
def compute(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def test_scheduler(data_adv):
cube = DaskSpectralCube.read(data_adv)
fake_array = Array()
cube._compute(fake_array)
assert fake_array.kwargs == {'scheduler': 'synchronous'}
with cube.use_dask_scheduler('threads'):
cube._compute(fake_array)
assert fake_array.kwargs == {'scheduler': 'threads'}
cube._compute(fake_array)
assert fake_array.kwargs == {'scheduler': 'synchronous'}
cube.use_dask_scheduler('threads')
cube._compute(fake_array)
assert fake_array.kwargs == {'scheduler': 'threads'}
with cube.use_dask_scheduler('processes', num_workers=4):
cube._compute(fake_array)
assert fake_array.kwargs == {'scheduler': 'processes', 'num_workers': 4}
cube._compute(fake_array)
assert fake_array.kwargs == {'scheduler': 'threads'}
def test_save_to_tmp_dir(data_adv):
pytest.importorskip('zarr')
cube = DaskSpectralCube.read(data_adv)
cube_new = cube.sigma_clip_spectrally(3, save_to_tmp_dir=True)
# The following test won't necessarily always work in future since the name
# is not really guaranteed, but this is pragmatic enough for now
assert cube_new._data.name.startswith('from-zarr')
def test_rechunk(data_adv):
cube = DaskSpectralCube.read(data_adv)
assert cube._data.chunksize == (4, 3, 2)
cube_new = cube.rechunk(chunks=(1, 2, 3))
    # note the last element is 2 because the chunk size we asked for
    # is larger than the cube - this is fine and deliberate in this test
assert cube_new._data.chunksize == (1, 2, 2)
def test_statistics(data_adv):
cube = DaskSpectralCube.read(data_adv).rechunk(chunks=(1, 2, 3))
stats = cube.statistics()
assert_quantity_allclose(stats['npts'], 24)
assert_quantity_allclose(stats['mean'], 0.4941651776136591 * u.K)
assert_quantity_allclose(stats['sigma'], 0.3021908870982011 * u.K)
assert_quantity_allclose(stats['sum'], 11.85996426272782 * u.K)
assert_quantity_allclose(stats['sumsq'], 7.961125988022091 * u.K ** 2)
assert_quantity_allclose(stats['min'], 0.0363300285196364 * u.K)
assert_quantity_allclose(stats['max'], 0.9662900439556562 * u.K)
assert_quantity_allclose(stats['rms'], 0.5759458158839716 * u.K)
@pytest.mark.skipif(not CASA_INSTALLED, reason='Requires CASA to be installed')
def test_statistics_consistency_casa(data_adv, tmp_path):
# Similar to test_statistics but compares to CASA directly.
cube = DaskSpectralCube.read(data_adv)
stats = cube.statistics()
make_casa_testimage(data_adv, tmp_path / 'casa.image')
ia = casatools.image()
ia.open(str(tmp_path / 'casa.image'))
stats_casa = ia.statistics()
ia.close()
for key in stats:
if isinstance(stats[key], u.Quantity):
value = stats[key].value
else:
value = stats[key]
assert_allclose(value, stats_casa[key])
def test_apply_function_parallel_spectral_noncube(data_adv):
'''
Testing returning a non-SpectralCube object with a user-defined
function for spectral operations.
'''
chunk_size = (-1, 1, 2)
cube = DaskSpectralCube.read(data_adv).rechunk(chunks=chunk_size)
def sum_blocks_spectral(data_chunk):
return data_chunk.sum(0)
# Tell dask.map_blocks that we expect the zeroth axis to be (1,)
output_chunk_size = (1, 2)
test = cube.apply_function_parallel_spectral(sum_blocks_spectral,
return_new_cube=False,
accepts_chunks=True,
drop_axis=[0], # The output will no longer contain the spectral axis
chunks=output_chunk_size)
# The total shape of test should be the (1,) + cube.shape[1:]
assert test.shape == cube.shape[1:]
# Test we get the same output as the builtin sum
assert_allclose(test.compute(), cube.sum(axis=0).unitless_filled_data[:])
def test_apply_function_parallel_spectral_noncube_withblockinfo(data_adv):
'''
Test receiving block_info information from da.map_blocks so we can place
the chunk's location in the whole cube when needed.
https://docs.dask.org/en/latest/array-api.html#dask.array.map_blocks
'''
chunk_size = (-1, 1, 2)
cube = DaskSpectralCube.read(data_adv).rechunk(chunks=chunk_size)
sum_spectral_plane = cube.sum(axis=0).unitless_filled_data[:]
    # Each value should be different. This is important for checking that the right
    # positions are being used in sum_blocks_spectral below.
assert np.unique(sum_spectral_plane).size == sum_spectral_plane.size
def sum_blocks_spectral(data_chunk, block_info=None, comparison_array=None):
chunk_sum = data_chunk.sum(0)
# When the block_info kwarg is defined, it should not be None
assert block_info is not None
# Check the block location compared to `comparison_array`
# Get the lower corner location in the whole cube.
loc = [block_range[0] for block_range in block_info[0]['array-location']]
# Should have 3 dimensions for the corner.
assert len(loc) == 3
# Slice comparison array to compare with this data chunk
thisslice = (slice(loc[1], loc[1] + chunk_sum.shape[0]),
slice(loc[2], loc[2] + chunk_sum.shape[1]),)
return chunk_sum == comparison_array[thisslice]
# Tell dask.map_blocks that we expect the zeroth axis to be (1,)
output_chunk_size = (1, 2)
test = cube.apply_function_parallel_spectral(sum_blocks_spectral,
return_new_cube=False,
accepts_chunks=True,
drop_axis=[0], # The output will no longer contain the spectral axis
chunks=output_chunk_size,
comparison_array=sum_spectral_plane) # Passed to `sum_blocks_spectral`
# The total shape of test should be the (1,) + cube.shape[1:]
assert test.shape == cube.shape[1:]
# Test all True
assert np.all(test.compute())
@pytest.mark.parametrize(('accepts_chunks'),
((True, False)))
def test_apply_function_parallel_shape(accepts_chunks):
# regression test for #772
def func(x, add=None):
if add is not None:
y = x + add
else:
raise ValueError("This test is supposed to have add=1")
return y
fn = data.get_pkg_data_filename('tests/data/example_cube.fits', 'spectral_cube')
cube = SpectralCube.read(fn, use_dask=True)
cube2 = SpectralCube.read(fn, use_dask=False)
# Check dask w/both threaded and unthreaded
rslt3 = cube.apply_function_parallel_spectral(func, add=1,
accepts_chunks=accepts_chunks)
with cube.use_dask_scheduler('threads', num_workers=4):
rslt = cube.apply_function_parallel_spectral(func, add=1,
accepts_chunks=accepts_chunks)
rslt2 = cube2.apply_function_parallel_spectral(func, add=1)
np.testing.assert_almost_equal(cube.filled_data[:].value,
cube2.filled_data[:].value)
np.testing.assert_almost_equal(rslt.filled_data[:].value,
rslt2.filled_data[:].value)
np.testing.assert_almost_equal(rslt.filled_data[:].value,
rslt3.filled_data[:].value)
@pytest.mark.parametrize('filename', ('data_adv', 'data_adv_beams',
'data_vda_beams', 'data_vda_beams_image'))
def test_cube_on_cube(filename, request):
if 'image' in filename and not CASA_INSTALLED:
pytest.skip('Requires CASA to be installed')
dataname = request.getfixturevalue(filename)
# regression test for #782
# the regression applies only to VaryingResolutionSpectralCubes
# since they are not SpectralCube subclasses
cube = DaskSpectralCube.read(dataname)
assert isinstance(cube, (DaskSpectralCube, DaskVaryingResolutionSpectralCube))
cube2 = SpectralCube.read(dataname, use_dask=False)
if 'image' not in filename:
# 'image' would be CASA and must be dask
assert not isinstance(cube2, (DaskSpectralCube, DaskVaryingResolutionSpectralCube))
with patch.object(cube, '_cube_on_cube_operation') as mock:
cube * cube
mock.assert_called_once()
with patch.object(cube, '_cube_on_cube_operation') as mock:
cube * cube2
mock.assert_called_once()
with patch.object(cube2, '_cube_on_cube_operation') as mock:
cube2 * cube
mock.assert_called_once()
if DISTRIBUTED_INSTALLED:
def test_dask_distributed(client, tmpdir): # noqa
# Make sure that we can use dask distributed. This is a regression test for
# a bug caused by FilledArrayHandler not being serializable.
cube = DaskSpectralCube.read(os.path.join(DATA, 'basic.image'))
cube.use_dask_scheduler(client)
cube.sigma_clip_spectrally(2, save_to_tmp_dir=tmpdir.strpath)
| radio-astro-tools/spectral-cube | spectral_cube/tests/test_dask.py | Python | bsd-3-clause | 10,242 | 0.002831 |
from __future__ import division, print_function, absolute_import
import sys
from numpy.testing import *
# solve/inv/det are exercised by name in the measure() strings below
from scipy.linalg import solve, inv, det
import numpy.linalg as linalg
def random(size):
return rand(*size)
class TestSolve(TestCase):
def bench_random(self):
basic_solve = linalg.solve
print()
print(' Solving system of linear equations')
print(' ==================================')
print(' | contiguous | non-contiguous ')
print('----------------------------------------------')
print(' size | scipy | basic | scipy | basic ')
for size,repeat in [(20,1000),(100,150),(500,2),(1000,1)][:-1]:
repeat *= 2
print('%5s' % size, end=' ')
sys.stdout.flush()
a = random([size,size])
# larger diagonal ensures non-singularity:
for i in range(size): a[i,i] = 10*(.1+a[i,i])
b = random([size])
print('| %6.2f ' % measure('solve(a,b)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_solve(a,b)',repeat), end=' ')
sys.stdout.flush()
a = a[-1::-1,-1::-1] # turn into a non-contiguous array
assert_(not a.flags['CONTIGUOUS'])
print('| %6.2f ' % measure('solve(a,b)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_solve(a,b)',repeat), end=' ')
sys.stdout.flush()
print(' (secs for %s calls)' % (repeat))
class TestInv(TestCase):
def bench_random(self):
basic_inv = linalg.inv
print()
print(' Finding matrix inverse')
print(' ==================================')
print(' | contiguous | non-contiguous ')
print('----------------------------------------------')
print(' size | scipy | basic | scipy | basic')
for size,repeat in [(20,1000),(100,150),(500,2),(1000,1)][:-1]:
repeat *= 2
print('%5s' % size, end=' ')
sys.stdout.flush()
a = random([size,size])
# large diagonal ensures non-singularity:
for i in range(size): a[i,i] = 10*(.1+a[i,i])
print('| %6.2f ' % measure('inv(a)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_inv(a)',repeat), end=' ')
sys.stdout.flush()
a = a[-1::-1,-1::-1] # turn into a non-contiguous array
assert_(not a.flags['CONTIGUOUS'])
print('| %6.2f ' % measure('inv(a)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_inv(a)',repeat), end=' ')
sys.stdout.flush()
print(' (secs for %s calls)' % (repeat))
class TestDet(TestCase):
def bench_random(self):
basic_det = linalg.det
print()
print(' Finding matrix determinant')
print(' ==================================')
print(' | contiguous | non-contiguous ')
print('----------------------------------------------')
print(' size | scipy | basic | scipy | basic ')
for size,repeat in [(20,1000),(100,150),(500,2),(1000,1)][:-1]:
repeat *= 2
print('%5s' % size, end=' ')
sys.stdout.flush()
a = random([size,size])
print('| %6.2f ' % measure('det(a)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_det(a)',repeat), end=' ')
sys.stdout.flush()
a = a[-1::-1,-1::-1] # turn into a non-contiguous array
assert_(not a.flags['CONTIGUOUS'])
print('| %6.2f ' % measure('det(a)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_det(a)',repeat), end=' ')
sys.stdout.flush()
print(' (secs for %s calls)' % (repeat))
if __name__ == "__main__":
run_module_suite()
| sargas/scipy | scipy/linalg/benchmarks/bench_basic.py | Python | bsd-3-clause | 4,065 | 0.013284 |
# -*- coding:utf-8 -*-
VERSION = (1, 7, 8, 'final', 0)
def get_version(*args, **kwargs):
# Don't litter django/__init__.py with all the get_version stuff.
# Only import if it's actually called.
from django.utils.version import get_version
return get_version(*args, **kwargs)
def setup():
"""
Configure the settings (this happens as a side effect of accessing the
first setting), configure logging and populate the app registry.
"""
from django.apps import apps
from django.conf import settings
from django.utils.log import configure_logging
configure_logging(settings.LOGGING_CONFIG, settings.LOGGING)
apps.populate(settings.INSTALLED_APPS)
_gevent = None
def using_gevent():
global _gevent
if _gevent is None:
from django.conf import settings
_gevent = settings.USING_GEVENT
return _gevent | wfxiang08/django178 | django/__init__.py | Python | bsd-3-clause | 876 | 0.003425 |
from matplotlib.path import Path
import matplotlib.patches as patches
from math import cos, sin, radians
class RectangleObject:
def __init__(self, centerPosition, sideSize1, sideSize2, label, rotation=0.0, color='black'):
self.sideSize1 = sideSize1
self.sideSize2 = sideSize2
self.label = label
self.rotation = rotation
self.centerPosition = centerPosition
self.color = color
self.calculate_object_position(
sideSize1, sideSize2, centerPosition, rotation)
self.patch = self.make_patch()
def get_object_dimensions(self):
side1 = ((self.x0-self.x1)**2+(self.y0-self.y1)**2)**0.5
side2 = ((self.x1-self.x2)**2+(self.y1-self.y2)**2)**0.5
side3 = ((self.x2-self.x3)**2+(self.y2-self.y3)**2)**0.5
side4 = ((self.x3-self.x0)**2+(self.y3-self.y0)**2)**0.5
return(side1,side2,side3,side4)
def get_center_position(self):
return((self.x0+self.x2)*0.5, (self.y0+self.y2)*0.5)
def make_patch(self):
verts = [
(self.x0,self.y0),
(self.x1,self.y1),
(self.x2,self.y2),
(self.x3,self.y3),
(self.x0,self.y0)]
codes = [Path.MOVETO,
Path.LINETO,
Path.LINETO,
Path.LINETO,
Path.CLOSEPOLY]
label = [
self.label,
". Center: ",
str(self.centerPosition),
". Side length: ",
str(self.sideSize1),
"x",
str(self.sideSize2),
". Rotation: ",
str(self.rotation)]
label = ''.join(label)
path = Path(verts, codes)
patch = patches.PathPatch(
path,
lw=2,
fill=False,
color=self.color,
label=label)
return(patch)
def calculate_object_position(self, sideSize1, sideSize2, centerPosition, rotation):
#This can probably be done in a much more elegant way...
temp_x0 = -sideSize1/2.
temp_y0 = -sideSize2/2.
temp_x1 = -sideSize1/2.
temp_y1 = sideSize2/2.
temp_x2 = sideSize1/2.
temp_y2 = sideSize2/2.
temp_x3 = sideSize1/2.
temp_y3 = -sideSize2/2.
x0 = temp_x0*cos(radians(rotation))-temp_y0*sin(radians(rotation))
y0 = temp_x0*sin(radians(rotation))+temp_y0*cos(radians(rotation))
x1 = temp_x1*cos(radians(rotation))-temp_y1*sin(radians(rotation))
y1 = temp_x1*sin(radians(rotation))+temp_y1*cos(radians(rotation))
x2 = temp_x2*cos(radians(rotation))-temp_y2*sin(radians(rotation))
y2 = temp_x2*sin(radians(rotation))+temp_y2*cos(radians(rotation))
x3 = temp_x3*cos(radians(rotation))-temp_y3*sin(radians(rotation))
y3 = temp_x3*sin(radians(rotation))+temp_y3*cos(radians(rotation))
x_center_pos = centerPosition[0]
y_center_pos = centerPosition[1]
self.x0 = x0 + x_center_pos
self.y0 = y0 + y_center_pos
self.x1 = x1 + x_center_pos
self.y1 = y1 + y_center_pos
self.x2 = x2 + x_center_pos
self.y2 = y2 + y_center_pos
self.x3 = x3 + x_center_pos
self.y3 = y3 + y_center_pos
class CircleObject:
def __init__(self, centerPosition, radius, label, color='black'):
self.centerPosition = centerPosition
self.radius = radius
self.label = label
self.color = color
self.patch = self.make_patch()
def make_patch(self):
label = [
self.label,
". Center: ",
str(self.centerPosition),
". Radius: ",
str(self.radius)]
label = ''.join(label)
circle = patches.Circle(
self.centerPosition,
self.radius,
fill=False,
edgecolor=self.color,
label=label)
return(circle)
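
# Minimal usage sketch: draws the two patch helpers above with matplotlib.
# The coordinates, sizes and labels below are illustrative only.
if __name__ == '__main__':
    import matplotlib.pyplot as plt

    fig, ax = plt.subplots()
    rect = RectangleObject((0.0, 0.0), 4.0, 2.0, "Aperture", rotation=30.0, color='red')
    circ = CircleObject((1.0, 1.0), 0.5, "Beam stop")
    ax.add_patch(rect.patch)  # PathPatch carrying a descriptive legend label
    ax.add_patch(circ.patch)  # Circle patch carrying a descriptive legend label
    ax.set_xlim(-5, 5)
    ax.set_ylim(-5, 5)
    ax.set_aspect('equal')
    ax.legend(loc='upper right', fontsize='small')
    plt.show()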
| magnunor/camera_aperture_position | camera_position_tools.py | Python | gpl-3.0 | 4,017 | 0.005726 |
# -*- coding: utf-8 -*-
# Part of the psychopy.iohub library.
# Copyright (C) 2012-2016 iSolver Software Solutions
# Distributed under the terms of the GNU General Public License (GPL).
# /System/Library/Frameworks/Carbon.framework/Versions/A/Frameworks/
# HIToolbox.framework/Headers/Events.h
QZ_ESCAPE = 0x35
QZ_F1 = 0x7A
QZ_F2 = 0x78
QZ_F3 = 0x63
QZ_F4 = 0x76
QZ_F5 = 0x60
QZ_F6 = 0x61
QZ_F7 = 0x62
QZ_F8 = 0x64
QZ_F9 = 0x65
QZ_F10 = 0x6D
QZ_F11 = 0x67
QZ_F12 = 0x6F
QZ_F13 = 0x69
QZ_F14 = 0x6B
QZ_F15 = 0x71
QZ_F16 = 0x6A
QZ_F17 = 0x40
QZ_F18 = 0x4F
QZ_F19 = 0x50
QZ_F20 = 0x5A
QZ_BACKQUOTE = 0x32
QZ_MINUS = 0x1B
QZ_EQUALS = 0x18
QZ_BACKSPACE = 0x33
QZ_INSERT = 0x72
QZ_HOME = 0x73
QZ_PAGEUP = 0x74
QZ_NUMLOCK = 0x47
QZ_KP_EQUALS = 0x51
QZ_KP_DIVIDE = 0x4B
QZ_KP_MULTIPLY = 0x43
QZ_TAB = 0x30
QZ_LEFTBRACKET = 0x21
QZ_RIGHTBRACKET = 0x1E
QZ_BACKSLASH = 0x2A
QZ_DELETE = 0x75
QZ_END = 0x77
QZ_PAGEDOWN = 0x79
QZ_KP7 = 0x59
QZ_KP8 = 0x5B
QZ_KP9 = 0x5C
QZ_KP_MINUS = 0x4E
QZ_CAPSLOCK = 0x39
QZ_SEMICOLON = 0x29
QZ_QUOTE = 0x27
QZ_RETURN = 0x24
QZ_KP4 = 0x56
QZ_KP5 = 0x57
QZ_KP6 = 0x58
QZ_KP_PLUS = 0x45
QZ_LSHIFT = 0x38
QZ_COMMA = 0x2B
QZ_PERIOD = 0x2F
QZ_SLASH = 0x2C
QZ_RSHIFT = 0x3C
QZ_UP = 0x7E
QZ_KP1 = 0x53
QZ_KP2 = 0x54
QZ_KP3 = 0x55
QZ_NUM_ENTER = 0x4C
QZ_LCTRL = 0x3B
QZ_LALT = 0x3A
QZ_LCMD = 0x37
QZ_SPACE = 0x31
QZ_RCMD = 0x36
QZ_RALT = 0x3D
QZ_RCTRL = 0x3E
QZ_FUNCTION = 0x3F
QZ_LEFT = 0x7B
QZ_DOWN = 0x7D
QZ_RIGHT = 0x7C
QZ_KP0 = 0x52
QZ_KP_PERIOD = 0x41
QZ_F1 = 145 # Keycode on Apple wireless kb
QZ_F2 = 144 # Keycode on Apple wireless kb
QZ_F3 = 160 # Keycode on Apple wireless kb
QZ_F4 = 131 # Keycode on Apple wireless kb
code2label = {}
# need to copy locals for py3
for k, v in locals().copy().items():
if k.startswith('QZ_'):
klabel = u'' + k[3:].lower()
code2label[klabel] = v
code2label[v] = klabel
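
# Example of the resulting two-way mapping (illustrative):
#   code2label['escape'] -> 0x35 and code2label[0x35] -> 'escape'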
| psychopy/versions | psychopy/iohub/devices/keyboard/darwinkey.py | Python | gpl-3.0 | 1,860 | 0 |
#!/usr/bin/env python3
# forked from spline1986's versions
import os, sys, sqlite3
from ii_functions import *
args=sys.argv[1:]
if len(args)==0:
print("Usage: sqlite-export.py <db_file>")
sys.exit(1)
check_dirs()
conn = sqlite3.connect(args[0])
c = conn.cursor()
c.execute("""CREATE TABLE IF NOT EXISTS msg(
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
msgid TEXT,
kludges TEXT,
echoarea TEXT,
timestump INTEGER,
from_name TEXT,
address TEXT,
to_name TEXT,
subject TEXT,
body TEXT,
UNIQUE (id, msgid));""")
echoareas = sorted(os.listdir(indexdir))
for echoarea in echoareas:
print("Echoarea: " + echoarea)
msgids = getMsgList(echoarea)
for msgid in msgids[:-1]:
print("MSGID: " + msgid)
msg = getMsg(msgid)
c.execute("INSERT OR IGNORE INTO msg (msgid, kludges, echoarea, timestump, from_name, address, to_name, subject, body) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);", (msgid, "/".join(msg["tags"]), msg["echo"], msg["time"], msg["sender"], msg["addr"], msg["to"], msg["subj"], msg["msg"]))
print("OK")
conn.commit()
conn.close()
| vit1-irk/ii-db-utils | sqlite-export.py | Python | cc0-1.0 | 1,050 | 0.013333 |
import os
import sys
import logging
import uuid
import traceback
import datetime
import cgi
import MySQLdb
import stripe
import re
import requests
import urllib
import time
import pmsconstants
from flask import Flask, render_template, request, jsonify, redirect, url_for, Markup, session, Response
from werkzeug import parse_options_header, generate_password_hash, check_password_hash
from google.appengine.datastore.datastore_query import Cursor
from google.appengine.api import mail, users, memcache, images
from google.appengine.ext import ndb, blobstore
from google.appengine.ext.webapp import blobstore_handlers
from datetime import date, datetime, timedelta
from webapp2_extras import security
from pmsdatamodel import Members.........
from pmsmemberinfo import MemberInfo
from pmsemailutility import EmailerUtility
from pmstextutility import TextUtility
app = Flask(__name__)
app.secret_key = ...
#Custom template filters
@app.template_filter('format_cents_as_currency')
def format_cents_as_currency_filter(value):
return "${:,.2f}".format(float(value) / 100.0)
#:: SIGNING IN AUTHENTICATION ::
#Someone is trying to login
@app.route('/signinauthenticate', methods=['POST'])
def signinauthenticate():
#grab the request data
try:
#or use a email parsing library, you get the idea and do something...
inputEmailAddress = request.form.get("inputEmailAddress")
if not re.match(r"[^@]+@[^@]+\.[^@]+", inputEmailAddress):
if not re.match(r"^[A-Za-z0-9\.\+_-]+@[A-Za-z0-9\._-]+\.[a-zA-Z]*$", inputEmailAddress):
inputPassword = request.form.get("inputPassword")
#Query NoSQL and find out if this member already exists by email, if so, show the error
member = MemberInfo()
member = member.getMemberInfoByEmail(inputEmailAddress)
#Make sure the password is correct
if not check_password_hash(member.passwordhash, inputPassword):
return render_template('index.html', inputEmailAddress=inputEmailAddress, alertmessage='It appears that is not quite right.')
#Save the session and cookie values (do more than just email, but again, you get the idea)
session[_SESSION_COOKIE_EMAIL] = member.emailaddress
return redirect(url_for('landingpage'))
except:
return render_template('index.html', inputEmailAddress='', alertmessage='Oops!')
#:: SAVE USER PROFILE PHOTO ::
#This route only gets used when a user saves updates to their profile photo
@app.route('/userprofilephotoconfirm', methods=['POST'])
def userprofilephotoconfirm():
member = MemberInfo()
#this will cause an ugly key error if we don't handle it properly
try:
inputUploadedPictureFile = request.files['inputProfilepicture']
if inputUploadedPictureFile:
header = inputUploadedPictureFile.headers['Content-Type']
parsed_header = parse_options_header(header)
blob_key = parsed_header[1]['blob-key']
except:
#no need to log this error output
dummyvariable = ""
    #a user is uploading a picture, either new if they did not have one prior, or uploaded a new one which would delete the old one
    try:
        if inputUploadedPictureFile:
            if member.pictureblobstorekey:
                blobstore.delete(member.pictureblobstorekey)
                images.delete_serving_url(member.pictureblobstorekey)
            member.pictureservingurl = images.get_serving_url(blob_key)
            member.pictureblobstorekey = blob_key
        member.put()
        return render_template('userprofilephotosaved.html', member=member)
    except:
try:
#If you couldn't complete the user save, be sure to delete the photo from the blobstore or re-use it later (to avoid a lost child hanging around)
inputUploadedPictureFile = request.files['inputProfilepicture']
if inputUploadedPictureFile:
header = inputUploadedPictureFile.headers['Content-Type']
parsed_header = parse_options_header(header)
blob_key = parsed_header[1]['blob-key']
blobstore.delete(blob_key)
except:
#no need to log this error output
dummyvariable = ""
#Create a new form POST URL for the blobstore
userprofilephoto_form_url = blobstore.create_upload_url('/userprofilephotoconfirm')
        return render_template('userprofilephoto.html', member=member, userprofilephoto_form_url=userprofilephoto_form_url, user_profilepicturesrc=user_profilepicturesrc, alertmessage='Oops!')
#:: SUBSCRIPTION SIGN UP CONFIRMATION ::
#This route only gets used when a parent signs up for a plan
@app.route('/subscriptionsignupconfirm', methods=['POST'])
def subscriptionsignupconfirm():
member = MemberInfo()
try:
#Set the required stripe API key that is going to be used
stripe.api_key = _STRIPE_SECRET_KEY
#If this person has a stripecustomerid (they are a Stripe customer object), then just update the plan!
if stripeprofile.stripecustomerid:
#Retrieve the customer from Stripe
try:
stripeCustomer = stripe.Customer.retrieve(stripeprofile.stripecustomerid)
except:
# The card has been declined
logging.error(':: Error | subscriptionsignupconfirm | 1 -- Error creating a new subscription ... ::')
raise Exception
else:
#If this person does not have a stripecustomerid (they are not a Stripe customer object), then they MUST have a token, otherwise we bomb
inputStripeToken = request.form.get("inputStripeToken")
if not inputStripeToken:
logging.error(':: Error | subscriptionsignupconfirm | 1 -- inputStripeToken was None ... ::')
raise Exception
#Create a new Stripe customer for this member
try:
stripeCustomer = stripe.Customer.create(
source=inputStripeToken,
email=member.emailaddress
)
#Save that payment profile object
stripeprofile.stripecustomerid = stripeCustomer.id
except:
# The card has been declined
logging.error(':: Error | subscriptionsignupconfirm | 1 -- Error creating a new subscription ... ::')
raise Exception
#This customer update call will update the customer subscription
try:
#Save the plan on the customer record at Stripe
#planType could be any plan you set up at Stripe, like a yearly or monthly plans perhaps
subscription = stripeCustomer.subscriptions.create(plan=planType)
#Save the plan type for the user in NoSQL
stripeprofile.stripe_subscription_plan = planType
stripeprofile.stripe_subscription_id = subscription.id
#You could even use gift codes in your app very easily too
#if inputGiftCode:
# stripeprofile.subscriptiongiftcode = inputGiftCode
#else:
# stripeprofile.subscriptiongiftcode = None
#stripeprofile.put()
except:
# The card has been declined
logging.error(':: Error | subscriptionsignupconfirm | 1 -- Error creating a new subscription ... ::')
raise Exception
return redirect(url_for('subscriptionsignupsuccess'))
except:
logging.error(':: Error | subscriptionsignupconfirm | An error occurred trying ... ::')
logging.error(traceback.format_exc())
return render_template('subscriptionsignupfailure.html', member=member)
#:: STRIPE ACCOUNT WEBHOOK ::
#Used by Stripe to contact us programmatically telling us about certain back-end events, like an account that has become unverified due to incorrect information
@app.route('/stripewebhookbdfjkl4378hsfk43jkasdkl', methods=['POST'])
def stripewebhookbdfjkl4378hsfk43jkasdkl():
webhookJSON = request.get_json()
#Get the type of event
eventType = webhookJSON.get('type')
#Get the live mode of event
eventMode = webhookJSON.get('livemode')
if not eventMode:
return Response(status=200)
#Get the event ID and Account ID
eventID = webhookJSON.get('id')
eventAccountID = webhookJSON.get('user_id')
#Check if this event ID already exists in our system, no need to call Stripe for a duplicate event
if eventexists...:
return Response(status=200)
#Call Stripe asking for event details for that event ID.
stripe.api_key = pmsconstants._STRIPE_SECRET_KEY
stripeEvent = None
try:
#Get the stripe event from Stripe itself using the eventID as input
stripeEvent = stripe.Event.retrieve(id=eventID, stripe_account=eventAccountID)
except:
#If botched request for some reason return 300 so Stripe will re-send it
logging.error(traceback.format_exc())
return Response(status=300)
#If botched request for some reason return 300 so Stripe will re-send it
if not stripeEvent:
logging.error(traceback.format_exc())
return Response(status=300)
#Check and make sure the event from Stripe is live and not test and also an account.updated event
if stripeEvent.type=='account.updated' and stripeEvent.livemode:
#Call Stripe, asking for the Account entity, get legal_entity
stripeAccount = stripe.Account.retrieve(stripeprofile.stripeaccountid)
#and so on...keep processing whatever business logic is required
return Response(status=200)
#:: LOGOUT ::
#This is where users will logout
@app.route('/signout')
def signout(action=None, param=None):
#Remove the session cookie security goodies
if _SESSION_COOKIE_EMAIL in session:
session.pop(_SESSION_COOKIE_EMAIL, None)
return render_template('signedout.html')
#:: Error handlers ::
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
logging.error(':: A 404 was thrown a bad URL was requested ::')
logging.error(traceback.format_exc())
return render_template('404.html'), 404
@app.errorhandler(400)
def key_error(e):
logging.error(':: A 400 was thrown key_error ::')
logging.error(traceback.format_exc())
return render_template('400.html'), 400
| scottsappen/PayMySitter | main.py | Python | apache-2.0 | 10,136 | 0.016772 |
#!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
Bitcharts - ORM classes and functions
Copyright(c) 2014 - Lisandro Gallo (lisogallo)
liso@riseup.net
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import re
import sys
import smtplib
import requests
import argparse
import simplejson as json
from collections import OrderedDict
from sqlalchemy.engine import Engine
from BeautifulSoup import BeautifulSoup
from ConfigParser import SafeConfigParser
from datetime import date, datetime, timedelta
from sqlalchemy import exc, event, create_engine, ForeignKey, Sequence
from sqlalchemy import Column, Date, Time, Integer, String, Boolean, Float
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, backref, aliased, sessionmaker
@event.listens_for(Engine, 'connect')
def set_sqlite_pragma(dbapi_connection, connection_record):
"""
Decorator to force the support of foreign keys on SQLite.
:param dbapi_connection: Engine object.
:param connection_record: Connection string.
"""
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
def open_session(engine):
"""
Open session on current connection and return session object.
:param engine: Engine object for current connection.
"""
Session = sessionmaker(bind=engine)
session = Session()
return session
def connect_database(database_url):
"""
Create connection to engine and return engine object.
:param database_url: Full path URL to SQLite database.
"""
# Set 'echo' to True to get verbose output.
engine = create_engine(database_url, echo=False)
return engine
def create_tables(database_url):
"""
Create database schema.
:param database_url: Full path URL to SQLite database.
"""
engine = connect_database(database_url)
Base.metadata.create_all(engine)
def config_parser(config_file):
"""
Parse data from configuration files.
:param config_file: Configuration file with currencies or exchanges data.
"""
res = []
# Parse and read configuration file.
cparser = SafeConfigParser()
cparser.read(config_file)
for section in cparser.sections():
tup = ()
for option in cparser.options(section):
value = cparser.get(section, option)
# String 'True' or 'False' values to boolean
if value == 'True':
value = True
elif value == 'False':
value = False
tup += (value, )
res.append(tup)
return res
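
# Illustrative layout of the configuration files consumed by config_parser()
# and initialize_database(). Section and option names are placeholders; what
# matters is that each section lists its values in the order expected below
# (currencies: name, description, cryptocurrency, active;
#  exchanges: name, country, url, api, currency name, active):
#
#   [bitstamp]
#   name = Bitstamp
#   country = USA
#   url = https://www.bitstamp.net
#   api = /api/ticker/
#   currency = BTC
#   active = True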
def send_email(sender, receiver, subject, body):
"""
Auxiliar function to inform by mail about any unexpected exception.
:param sender: From mail address.
:param receiver: Destination mail address.
:param subject: Subject.
:param body: Content body.
"""
try:
msg = ("From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n%s"
%(sender, receiver, subject, body))
smtp = smtplib.SMTP('localhost')
smtp.sendmail(sender, [receiver], msg)
smtp.quit()
except Exception as exception:
print 'Error %s:' % exception.args[0]
def get_json(url):
"""
Get JSON resource from remote URL.
:param url: Full URL to JSON resource over HTTP protocol.
"""
try:
req = requests.get(url,
headers={'Accept': 'application/json'},
timeout=5)
res = req.json()
return res
except Exception as exception:
print 'Error %s:' % exception.args[0]
send_email(
'daemon@bitcharts.org',
'staff@bitcharts.org',
'ERROR',
exception.args[0]
)
res = {}
return res
def is_dict(something):
"""
Check if input object is a dictionary or contains a dictionary.
Return the dictionary found.
:param something: Input object to check.
"""
if type(something) is dict:
for values in something.itervalues():
if type(values) is dict:
return is_dict(values)
return something
def parse_values(dictionary):
"""
    Search for common keys in exchanges' APIs which contain currency values.
:param dictionary: Dictionary previously obtained from JSON APIs.
"""
# Check if input is or contains a dictionary and returns it.
res = is_dict(dictionary)
# Search for common keys used on APIs and store its values
if 'last' in res.iterkeys():
try:
last = float(res.get('last'))
return last
except TypeError:
return None
elif 'blue' in res.iterkeys():
try:
blue = float(res.get('blue'))
return blue
except TypeError:
return None
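
# Worked examples for is_dict()/parse_values() above (values illustrative):
#   parse_values({'ticker': {'last': '123.45'}}) -> 123.45  (nested dict, 'last' key)
#   parse_values({'blue': 9.87}) -> 9.87                    ('blue' fallback)
#   parse_values({'bid': 1.0}) -> None                      (no known key)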
def write_object(database_url, new_object):
"""
Write new currency, exchange or association object to database through ORM.
:param database_url: Full path URL to SQLite database.
:param new_object: Object variable.
"""
try:
engine = connect_database(database_url)
session = open_session(engine)
session.add(new_object)
session.commit()
except exc.SQLAlchemyError, exception:
if session:
session.rollback()
print 'Error %s:' % exception.args[0]
sys.exit(1)
finally:
if session:
session.close()
def initialize_database(database_url, currencies_file, exchanges_file):
"""
Initialize Bitcharts database with exchanges and currencies data
parsed from configuration files.
:param database_url: Full path URL to SQLite database.
:param currencies_file: Configuration file with currencies information.
:param exchanges_file: Configuration file with exchanges information.
"""
currencies = config_parser(currencies_file)
# From data in configuration file create each currency ORM object.
for currency in currencies:
name, description, cryptocurrency, active = currency
new_currency = Currency(name,
description,
cryptocurrency,
active)
write_object(database_url, new_currency)
try:
engine = connect_database(database_url)
session = open_session(engine)
# From data in configuration file create each currency ORM object.
exchanges = config_parser(exchanges_file)
# Open a session and query the associated currency id from the
# currency name (unique) in the configuration file.
for exchange in exchanges:
name, country, url, api, currency_name, active = exchange
query = session.query(Currency.id).filter(
Currency.name == currency_name).first()
currency_id = query[0]
# From data in configuration file create each currency ORM object.
new_exchange = Exchange(name,
country,
url,
api,
currency_id,
active)
write_object(database_url, new_exchange)
except exc.SQLAlchemyError, exception:
if session:
session.rollback()
print 'Error %s:' % exception.args[0]
sys.exit(1)
finally:
if session:
session.close()
def clean_database(database_url, days):
"""
Clean old records from database keeping only the last X days.
:param database_url: Full path URL to SQLite database.
:param days: Number of days to keep.
"""
try:
engine = connect_database(database_url)
session = open_session(engine)
today = date.today()
last_day = today + timedelta(-int(days))
query = session.query(Association).filter(
Association.date < last_day).all()
for row in query:
session.delete(row)
session.commit()
except exc.SQLAlchemyError, exception:
if session:
session.rollback()
print 'Error %s:' % exception.args[0]
sys.exit(1)
finally:
if session:
session.close()
def write_values(database_url):
"""
    Write the current currency values obtained from exchanges as new association
objects in the ORM.
:param database_url: Full path URL to SQLite database.
"""
try:
engine = connect_database(database_url)
session = open_session(engine)
# Store in an intermediate class the current active currencies
active_currency_ids = aliased(Currency, session.query(Currency)
.filter(Currency.active == 1).subquery())
# Store in an intermediate class the current active exchanges
active_exchange_ids = aliased(Exchange, session.query(Exchange)
.filter(Exchange.active == 1).subquery())
# Store in an intermediate class the current active exchanges for
# current active currencies
active_currency_exchange_ids = aliased(
session.query(active_exchange_ids).filter(
active_exchange_ids.currency_id == active_currency_ids.id)
.subquery())
query = session.query(active_currency_exchange_ids).all()
# Get the active currency values from an active exchange and store
# data on an association object. Timestamp it is also stored.
for exchange in query:
api_url = exchange.url + exchange.api
from_api = get_json(api_url)
if from_api:
last = parse_values(from_api)
if last:
new_assoc = Association(exchange.id,
exchange.currency_id,
last)
write_object(database_url, new_assoc)
except exc.SQLAlchemyError, exception:
print 'Error %s:' % exception.args[0]
sys.exit(1)
finally:
if session:
session.close()
def write_json_file(ordered_dict, json_path):
"""
    Serialize an ordered dictionary to a pretty-printed JSON file.
:param ordered_dict: Ordered dictionary to be serialized to JSON.
:param json_path: Fullpath to write serialized JSON in filesystem.
"""
pretty_json = json.dumps(ordered_dict, sort_keys=False, indent=4 * ' ')
    # Write a pretty formatted JSON to a file
with open(json_path, 'w') as json_file:
print >> json_file, pretty_json
json_file.close()
def generate_sources_json(database_url, output_dir):
"""
Generates a JSON file on filesystem for the Bitcharts' API.
:param database_url: Full path URL to SQLite database.
:param output_dir: Output directory to write serialized JSON in filesystem.
"""
try:
engine = connect_database(database_url)
session = open_session(engine)
# Get current active exchanges
active_exchange_ids = aliased(Exchange, session.query(Exchange).filter(
Exchange.active == 1).subquery())
exchanges = session.query(active_exchange_ids).all()
# Ordered dictionary to be serialized to JSON
sources_dict = OrderedDict()
# Show the timestamp on the JSON API
sources_dict['timestamp'] = datetime.now().strftime(
"%a %b %d %Y, %H:%M:%S")
# Get a dict for each exchange and append it to the main sources dict
for exchange in exchanges:
query = session.query(Association).filter(
Association.exchange_id == exchange.id).order_by(
Association.date.desc()).order_by(
Association.time.desc()).first()
key_name, row_dict = query.asdict(session)
sources_dict[key_name] = row_dict
# Generate JSON file from ordered dictionary
json_path = output_dir + 'sources.json'
print 'Generating ' + json_path + ' file...'
write_json_file(sources_dict, json_path)
except exc.SQLAlchemyError, exception:
print 'Error %s:' % exception.args[0]
sys.exit(1)
finally:
if session:
session.close()
def generate_graphs_json(database_url, output_dir):
"""
Generates a JSON file on filesystem for the Bitcharts' graphs.
:param database_url: Full path URL to SQLite database.
:param output_dir: Output directory to write serialized JSON in filesystem.
"""
try:
engine = connect_database(database_url)
session = open_session(engine)
# Get current active exchanges
active_exchange_ids = aliased(Exchange, session.query(Exchange).filter(
Exchange.active == 1).subquery())
exchanges = session.query(active_exchange_ids).all()
# Store the actual date
today = date.today()
# Show the timestamp on the JSON API
graphs_dict = OrderedDict()
graphs_dict['timestamp'] = datetime.now().strftime(
"%a %b %d %Y, %H:%M:%S")
# The following generates a Python dictionary storing BTC values
# for the last 10 days obtained from active BTC exchanges
for exchange in exchanges:
values = []
# Iterate on days from today to the last 10 days
for i in range(1, 11):
day = today + timedelta(days=-i)
# Get the last currency value stored for current day
query = session.query(Association).filter(
Association.date == day).filter(
Association.exchange_id == exchange.id).order_by(
Association.time.desc()).first()
if query is None:
# If the script is getting null values for current day,
# then puts the last value obtained.
if day == today:
query = session.query(Association).filter(
Association.exchange_id == exchange.id).order_by(
Association.time.desc()).first()
values.append(query.last)
else:
values.append(None)
else:
values.append(query.last)
key_name = exchange.name.lower()
graphs_dict[key_name] = values[::-1]
# Generate JSON file from ordered dictionary
json_path = output_dir + 'graphs.json'
print 'Generating ' + json_path + ' file...'
write_json_file(graphs_dict, json_path)
except exc.SQLAlchemyError, exception:
print 'Error %s:' % exception.args[0]
sys.exit(1)
finally:
if session:
session.close()
def generate_marketcap_json(output_dir):
"""
Get marketcap values from coinmarketcap.com and output to a JSON file.
:param output_dir: Output directory to write serialized JSON in filesystem.
"""
try:
# Get full web page from Coinmarketcap.com index.
session = requests.Session()
link = 'http://coinmarketcap.com/'
req = session.get(link)
# Create BeautifulSoup object with web response.
soup = BeautifulSoup(req.text)
# Ordered dictionary object to store data to be JSON serialized.
marketcap_dict = OrderedDict()
marketcap_dict['timestamp'] = datetime.now().strftime(
'%a %b %d %Y, %H:%M:%S')
marketcap_dict['currencies'] = []
# Regex expression to search for patterns in web page.
anything = re.compile('^.*$')
name_regex = re.compile('^.*\\bcurrency-name\\b.*$')
marketcap_regex = re.compile('^.*\\bmarket-cap\\b.*$')
price_regex = re.compile('^.*\\bprice\\b.*$')
positive_change_regex = re.compile('^.*\\bpositive_change\\b.*$')
negative_change_regex = re.compile('^.*\\bnegative_change\\b.*$')
# Find HTML <tr> tags for each currency.
table = soup.findAll('tr', {'id': anything})
# Find the top 5 (five) currencies with the highest marketcap
# and obtain their values
for item in table[:5]:
currency = []
# Get the currency name
names = item.findAll('td', {'class': name_regex})
for name in names:
currency.append(name.find('a').contents[0].strip())
# Get the marketcap value
marketcaps = item.findAll('td', {'class': marketcap_regex})
for marketcap in marketcaps:
currency.append(marketcap.contents[0].strip())
# Get the price value
prices = item.findAll('a', {'class': price_regex})
for price in prices:
currency.append(price.contents[0].strip())
# Get the change percentage and sign
changes = item.findAll('td', {'class': positive_change_regex})
if changes:
for change in changes:
currency.append(change.contents[0].strip())
currency.append('positive')
else:
changes = item.findAll('td', {'class': negative_change_regex})
for change in changes:
currency.append(change.contents[0].strip())
currency.append('negative')
marketcap_dict['currencies'].append(currency)
# Generate JSON file from ordered dictionary
json_path = output_dir + 'marketcap.json'
print 'Generating ' + json_path + ' file...'
write_json_file(marketcap_dict, json_path)
except Exception as exception:
print 'Error %s:' % exception.args[0]
send_email(
'daemon@bitcharts.org',
'staff@bitcharts.org',
'ERROR',
exception.args[0]
)
def get_last_from_exchange(database_url, exchange_name):
"""
Get last value obtained from the specified exchange.
:param database_url: Full path URL to SQLite database.
:param exchange_name: Exchange name.
"""
try:
engine = connect_database(database_url)
session = open_session(engine)
# Get the exchange object for the given exchange name
exchange_obj = session.query(Exchange).filter(
Exchange.name == exchange_name).first()
# If exchange exists get its last currency value in database
if exchange_obj:
association_obj = session.query(Association).filter(
Association.exchange_id == exchange_obj.id).order_by(
Association.date.desc()).order_by(
Association.time.desc()).first()
return association_obj.last
else:
return None
except exc.SQLAlchemyError, exception:
print 'Error %s:' % exception.args[0]
sys.exit(1)
finally:
if session:
session.close()
Base = declarative_base()
class Currency(Base):
"""
SQLAlchemy ORM class to store information about currencies on database.
"""
__tablename__ = "currencies"
id = Column(Integer, Sequence('currency_id_seq'), primary_key=True)
name = Column(String(10), unique=True)
description = Column(String)
cryptocurrency = Column(Boolean)
active = Column(Boolean)
def __init__(self, name, description, cryptocurrency, active):
"""Docstring"""
self.name = name
self.description = description
self.cryptocurrency = cryptocurrency
self.active = active
class Exchange(Base):
"""
SQLAlchemy ORM class to store information about exchanges on database.
"""
__tablename__ = "exchanges"
id = Column(Integer, Sequence('exchange_id_seq'), primary_key=True)
name = Column(String(10))
country = Column(String(10))
url = Column(String)
api = Column(String)
currency_id = Column(Integer, ForeignKey("currencies.id"))
currency = relationship("Currency",
backref=backref("exchanges", order_by=id))
active = Column(Boolean)
def __init__(self, name, country, url, api, currency_id, active):
"""Docstring"""
self.name = name
self.country = country
self.url = url
self.api = api
self.currency_id = currency_id
self.active = active
class Association(Base):
"""
SQLAlchemy ORM class to store current currencies' values obtained from
APIs available on each exchange.
"""
__tablename__ = 'exchanges_currencies'
id = Column(Integer, Sequence('association_id_seq'), primary_key=True)
exchange_id = Column(Integer, ForeignKey('exchanges.id'))
currency_id = Column(Integer, ForeignKey('currencies.id'))
last = Column(Float)
date = Column(Date, default=date.today())
time = Column(Time, default=datetime.now().time())
def __init__(self, exchange_id, currency_id, last):
self.exchange_id = exchange_id
self.currency_id = currency_id
self.last = last
def asdict(self, session):
"""
Function which returns an ordered dictionary containing field values
from the current Association object.
"""
properties = OrderedDict()
id = self.exchange_id
exchange = session.query(Exchange).filter(Exchange.id == id).first()
properties['display_URL'] = exchange.url
properties['display_name'] = exchange.name
properties['currency'] = exchange.currency.name
if exchange.currency.name == 'ARS':
properties['blue'] = self.last
else:
properties['last'] = self.last
return exchange.name.lower(), properties
def parse_args():
"""
Options parser for running the Python script directly from shell to
create and initialize Bitcharts database for the first time.
"""
parser = argparse.ArgumentParser(prog='bitcharts.py')
parser.add_argument("-d", "--database-name",
help="Name for new database to create",
dest="database_name")
parser.add_argument("-c", "--currencies-file",
help="Configuration file with currencies information",
dest="currencies_file")
parser.add_argument("-e", "--exchanges-file",
help="Configuration file with exchanges information",
dest="exchanges_file")
if len(sys.argv) < 6:
print '\nERROR: Too few arguments.\n'
parser.print_help()
sys.exit()
args = parser.parse_args()
if args.database_name and args.currencies_file and args.exchanges_file:
# Append to filename a descriptive extension
filename = args.database_name + '.sqlite'
# If database file exists do not overwrite
if os.path.exists(filename):
            print '\nERROR: Database file "' + filename + '" already exists.\n'
sys.exit()
# Compose the full database URL to be passed to db connection string
database_url = 'sqlite:///' + filename
# Create database schema based on the SQLAlchemy ORM
create_tables(database_url)
# Initialize database with values obtained from configuration files
initialize_database(database_url,
args.currencies_file,
args.exchanges_file)
else:
print '\nERROR: You must specify valid inputs.\n'
parser.print_help()
sys.exit()
def main():
"""
Main function.
"""
try:
parse_args()
except KeyboardInterrupt:
print 'Aborting... Interrupted by user.'
sys.exit(0)
if __name__ == '__main__':
main()
| lisogallo/bitcharts-core-legacy | bitcharts.py | Python | agpl-3.0 | 24,568 | 0.00057 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import io
import ijson
import shutil
import logging
import zipfile
import tempfile
import requests
from .. import base
from .record import Record
logger = logging.getLogger(__name__)
# Module API
def collect(conf, conn):
"""Collect FDA Drug Labels.
"""
# For more information see:
# https://open.fda.gov/api/reference/
URL = 'http://download.open.fda.gov/drug/label/{file}.zip'
FILES = [
'drug-label-0001-of-0005.json',
'drug-label-0002-of-0005.json',
'drug-label-0003-of-0005.json',
'drug-label-0004-of-0005.json',
'drug-label-0005-of-0005.json',
]
# Create temp directory
dirpath = tempfile.mkdtemp()
success = 0
for file in FILES:
# Download json
url = URL.format(file=file)
arch = zipfile.ZipFile(io.BytesIO(requests.get(url).content))
path = arch.extract(file, dirpath)
file = io.open(path, encoding='utf-8')
# Get last updated
last_updated = list(ijson.items(file, 'meta.last_updated'))[0]
# Get items iterator
file.seek(0)
items = ijson.items(file, 'results.item')
for item in items:
meta = item['openfda']
base.config.SENTRY.extra_context({
'url': url,
'item': meta,
})
# Skip if no NDC code
if 'product_ndc' not in meta:
continue
# Get data
data = {
'product_ndc': meta['product_ndc'][0],
'product_type': meta['product_type'][0],
'generic_name': meta['generic_name'][0],
'brand_name': meta['brand_name'][0],
'last_updated': last_updated,
}
if meta.get('application_number'):
data['fda_application_number'] = meta['application_number'][0]
# Create record
record = Record.create(url, data)
# Write record
record.write(conf, conn)
# Log info
success += 1
if not success % 100:
logger.info('Collected %s "%s" interventions',
success, record.table)
# Remove temp directory
shutil.rmtree(dirpath)
| opentrials/collectors | collectors/fdadl/collector.py | Python | mit | 2,443 | 0.000409 |
import search_duplicated_task
| 3dfxsoftware/cbss-addons | duplicated_tasks/wizard/__init__.py | Python | gpl-2.0 | 30 | 0 |
# -*- coding: utf-8 -*-
formatter = "%r %r %r %r"
print formatter % (1,2,3,4)
print formatter % ("one", "two", "three", "four")
print formatter % (True, False, False, True)
print formatter % (formatter, formatter, formatter, formatter)
print formatter % (
"Whose woods these are",
"I think I know",
"His house is in",
"The village though"
)
| sdwebster/learn-python-the-hard-way-solutions | 08/ex8.py | Python | mit | 352 | 0.011364 |
import backend # hopefully fixes issues with Celery finding tasks?
| bleedingwolf/Spyglass | spyglass/__init__.py | Python | bsd-3-clause | 68 | 0.014706 |
import threading
from multiprocessing import cpu_count
class Terminator:
def __init__(self):
self._should_stop = False
def stop(self):
self._should_stop = True
def should_stop(self):
return self._should_stop
class Handler(object):
# no parameters are permitted; all configuration should be placed in the
# configuration file and handled in the Initialize() method
def __init__(self):
pass
# called when the service is starting
def Initialize(self, configFileName):
self.terminator = Terminator()
# called when the service is starting immediately after Initialize()
# use this to perform the work of the service; don't forget to set or check
# for the stop event or the service GUI will not respond to requests to
# stop the service
def Run(self):
from buildpal.server.runner import ServerRunner
ServerRunner(0, cpu_count()).run(self.terminator)
# called when the service is being stopped by the service manager GUI
def Stop(self):
self.terminator.stop()
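
# Minimal sketch of how a service host might drive this handler; the host loop
# and the configuration file name are hypothetical, only Initialize/Run/Stop
# above are real:
#
#   handler = Handler()
#   handler.Initialize('buildpal_server.ini')
#   worker = threading.Thread(target=handler.Run)
#   worker.start()
#   ...                       # run until the host asks us to stop
#   handler.Stop()
#   worker.join()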
| pkesist/buildpal | Python/server_svc_handler.py | Python | gpl-3.0 | 1,123 | 0.002671 |
#!/usr/bin/env python
import socket,sys,gateway_cfg,select,socketserver,http.server,urllib
from threading import *
class WebHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
parseParams = urllib.parse.urlparse(self.path)
if parseParams.path=="/t" :
self.send_error(404,"You can't pass!!")
else:
self.send_response(200)
self.send_header('Content-Type', 'application/html')
self.end_headers()
self.wfile.write("Hello World!!")
self.wfile.close()
class webserver (Thread):
def __init__(self,condition):
#init
Thread.__init__(self)
self.con = condition
def run(self):
#run
print("web server start!!")
Handler = WebHandler
httpd = http.server.HTTPServer(("", 8080), Handler)
httpd.serve_forever()
class msgcenter (Thread):
def __init__(self,condition):
#init server setting
Thread.__init__(self)
self.con = condition
try:
print("start config")
self.server = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
self.server.bind((gateway_cfg.address['host'],gateway_cfg.address['port']))
self.server.listen(gateway_cfg.max_user)
self.break_out = False
except socket.error as msg:
print("[ERROR] %s\n" % msg)
self.break_out = True
def run(self):
#start
if self.break_out == False:
print("msgcenter start!!")
while True:
try:
connection,address = self.server.accept()
connection.setblocking(0)
connection.close()
except IOError as e:
if e.errno == 11:
raise
else:
print("socket error")
exit(-1)
| fucxy/fucxy-node | gateway/modules.py | Python | gpl-3.0 | 1,672 | 0.031699 |
#!python
#-*- encoding=utf-8 -*-
import sys, sqlite3, logging, os, os.path
import wx, time, re, copy, webbrowser
import wx.grid, wx.html
import json, math
from cfg import config
from util import *
from model import *
from view import *
class App(wx.App):
instance = None
def __init__(self, conf, *args):
wx.App.__init__(self, *args) # do not redirect for now
self.user = None
self.cfg = conf
self.printer = None
def OnInit(self):
return True
@staticmethod
def GetInstance():
return App.instance
def Quit(self):
wx.Exit()
sys.exit()
def Run(self):
if not os.path.isfile( self.cfg.datapath ):
self.setup()
else:
self.bootstrap()
self.checkExpiration()
AuthFrame(self).Show()
def bootstrap(self):
self.dbconn = DB.getInstance().conn
self.modelUser = ModelUser( self.dbconn )
self.modelSheet= ModelSheet( self.dbconn )
self.logger = XLog.getDefaultLogger()
def setup(self):
try:
os.makedirs( os.path.join(self.cfg.rootdir, r'data') )
os.makedirs( os.path.join(self.cfg.rootdir, r'log') )
os.makedirs( os.path.join(self.cfg.rootdir, r'cache') )
except:
alert( u'程序初始化失败, 即将退出' )
self.Quit()
self.bootstrap()
self.modelUser.initTable()
self.modelSheet.initTable()
def checkExpiration(self):
self.expirationTip = ''
return True # skip expiration check
self.appExpireInDays = self.cfg.expiration
time0 = self.modelUser.getEarliestDate()
daysElapsed = 0
if time0>0:
daysElapsed = int( (time.time()-time0)/86400 )
if daysElapsed > -1:
if self.appExpireInDays < daysElapsed:
self.expire()
self.appExpireInDays -= daysElapsed
daysElapsed
self.expirationTip = u'试用版,{}天后过期'.format(self.appExpireInDays)
else:
self.expire(u'×系统时间混乱×\n程序退出')
    def expire(self, msg=u'本软件已过期,\n不能继续使用'):
        alert(msg, u'试用过期啦~')
self.Quit()
def authOk(self, user):
self.user = user
self.cfg.user = user
mf = MainFrame(parent=None, title=self.user['name'] + u' 你好,欢迎使用本软件') #( {} )'.format(self.expirationTip) )
mf.app = self
if self.user.isAdmin():
ManagerPanel(mf, self)
else:
OperatorPanel(mf)
mf.maxWindow()
def getPrinter(self):
if not self.printer:
self.printer = wx.html.HtmlEasyPrinting()
return self.printer
def printSheet(self, sheet):
# self.printViaHtml( sheet )
self.getPrinter().GetPrintData().SetPaperSize( wx.Size(-1, 400) )
self.getPrinter().GetPrintData().PaperSize = wx.Size(-1, 400)
self.getPrinter().PrintText( self.getSheetHtml(sheet) )
def getSheetHtml(self, sheet):
data = sheet.getDict()
data['bigamount'] = cnNumber( data['amount'] )
return getPrintTpl().format( **data )
def printViaHtml(self, sheet):
filepath = os.path.join(self.cfg.cachepath, "{}.html".format(sheet['id']) )
file = open(filepath, 'wb')
file.write( self.getSheetHtml(sheet).encode('utf-8') )
file.close()
webbrowser.open(filepath)
# if '__main__'==__name__:
app = App(config, False) # True, os.path.join(ctx.dir, 'run.dat') )
App.instance = app
config.app = app
app.Run()
app.MainLoop()
| dlutxx/cement | main.py | Python | mit | 3,137 | 0.048659 |
# run simulations for different numbers of voters
import multiprocessing
import os, sys
import shutil
import time
import numpy as np
from randomSets import *
def worker(((Ncandidats,q, Nwinners, Ntests))):
"""worker function"""
sys.stdout.write('\nSTART -- %i candidats -- \n' % Ncandidats)
sys.stdout.flush()
time.sleep(0.01) # being sure that simulation are differently initialized
minNvoters = findMinNvoters(Ncandidats, q =q, Nwinners = Nwinners, Ntests = Ntests)
with open('nmin-candidates-10-to-100-by-2.txt','a') as f_handle:
f_handle.write("%i " % Ncandidats)
np.savetxt(f_handle,minNvoters)
return
if __name__ == '__main__':
print "Cette fois, c'est la bonne !"
print (time.strftime("%H:%M:%S"))
root = "simulations/"
try:
os.mkdir(root)
except OSError:
pass
candidates = range(10,110, 2)
Nwinners = 1
minNvoters = np.zeros((len(candidates), Nwinners))
args = []
for i in range(len(candidates)):
arg = [candidates[i],200,1,100]
args.append(arg)
if args == []:
print "Rien a faire!"
pool = multiprocessing.Pool(processes=20)
pool.map(worker, args)
print "Alors, ca marche ? :)"
| plguhur/random-sets | simuFindNMin.py | Python | apache-2.0 | 1,243 | 0.015286 |
import json
from pprint import pprint
from sys import argv
jsonFile = argv[1]
with open(jsonFile) as data_file:
data = json.load(data_file)
for i in range(0, data['resultCount']):
if data['results'][i]['trackCount'] != 1:
        print(data['results'][i]['collectionName'], data['results'][i]['releaseDate'])
# sort by release date
pprint(data)
| mrricearoni/iTunesSearch | printRecentAlbums.py | Python | mit | 357 | 0.002801 |
#! /usr/bin/env python
import sys, os, getopt, struct, unittest
from distutils.spawn import spawn
build = True
verbosity = 2
here = os.path.dirname(os.path.abspath(__file__))
os.chdir(here)
def bits():
"""determine if running on a 32 bit or 64 bit platform
"""
return struct.calcsize("P") * 8
# -- parse options
try:
opts, args = getopt.getopt(sys.argv[1:], "nq")
if args:
raise getopt.GetoptError("too many arguments")
except getopt.GetoptError:
sys.exit("run-tests.py: error: %s" % sys.exc_info()[1])
for o, a in opts:
if o == "-q":
verbosity = 0
elif o == "-n":
build = False
# -- build greenlet
if build:
if verbosity == 0:
cmd = [sys.executable, "setup.py", "-q", "build_ext", "-q"]
else:
cmd = [sys.executable, "setup.py", "build_ext"]
spawn(cmd, search_path=0)
# -- find greenlet but skip the one in "."
if not build:
oldpath = sys.path[:]
sys.path.remove(here)
import greenlet
if not build:
sys.path[:] = oldpath
sys.stdout.write("python %s (%s bit) using greenlet %s from %s\n" %
(sys.version.split()[0], bits(), greenlet.__version__, greenlet.__file__))
# -- run tests
from tests import test_collector
suite = test_collector()
unittest.TextTestRunner(verbosity=verbosity).run(suite)
| ioram7/keystone-federado-pgid2013 | build/greenlet/run-tests.py | Python | apache-2.0 | 1,321 | 0.003785 |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorboard.plugins.mesh.summary."""
import glob
import json
import os
import tensorflow as tf
from tensorboard.compat import tf2
from tensorboard.plugins.mesh import summary
from tensorboard.plugins.mesh import metadata
from tensorboard.plugins.mesh import plugin_data_pb2
from tensorboard.plugins.mesh import test_utils
try:
tf2.__version__ # Force lazy import to resolve
except ImportError:
tf2 = None
try:
tf.compat.v1.enable_eager_execution()
except AttributeError:
# TF 2.0 doesn't have this symbol because eager is the default.
pass
class MeshSummaryV2Test(tf.test.TestCase):
def setUp(self):
super(MeshSummaryV2Test, self).setUp()
if tf2 is None:
self.skipTest("v2 summary API not available")
def mesh_events(self, *args, **kwargs):
self.write_mesh_event(*args, **kwargs)
event_files = sorted(glob.glob(os.path.join(self.get_temp_dir(), "*")))
self.assertEqual(len(event_files), 1)
events = list(tf.compat.v1.train.summary_iterator(event_files[0]))
# Expect a boilerplate event for the file_version, then the vertices
# summary one.
num_events = 2
# All additional tensors (i.e. colors or faces) will be stored as separate
# events, so account for them as well.
num_events += len(frozenset(["colors", "faces"]).intersection(kwargs))
self.assertEqual(len(events), num_events)
# Delete the event file to reset to an empty directory for later calls.
os.remove(event_files[0])
return events[1:]
def write_mesh_event(self, *args, **kwargs):
kwargs.setdefault("step", 1)
writer = tf2.summary.create_file_writer(self.get_temp_dir())
with writer.as_default():
summary.mesh(*args, **kwargs)
writer.close()
def get_metadata(self, event):
return metadata.parse_plugin_metadata(
event.summary.value[0].metadata.plugin_data.content
)
def test_step(self):
"""Tests that different components of mesh summary share the same
step."""
tensor_data = test_utils.get_random_mesh(
100, add_faces=True, add_colors=True
)
config_dict = {"foo": 1}
events = self.mesh_events(
"a",
tensor_data.vertices,
faces=tensor_data.faces,
colors=tensor_data.colors,
config_dict=config_dict,
step=333,
)
self.assertEqual(333, events[0].step)
self.assertEqual(333, events[1].step)
self.assertEqual(333, events[2].step)
def test_tags(self):
"""Tests proper tags for each event/tensor."""
tensor_data = test_utils.get_random_mesh(
100, add_faces=True, add_colors=True
)
config_dict = {"foo": 1}
name = "foo"
events = self.mesh_events(
name,
tensor_data.vertices,
faces=tensor_data.faces,
colors=tensor_data.colors,
config_dict=config_dict,
step=333,
)
expected_names_set = frozenset(
name_tpl % name for name_tpl in ["%s_VERTEX", "%s_FACE", "%s_COLOR"]
)
actual_names_set = frozenset(
[event.summary.value[0].tag for event in events]
)
self.assertEqual(expected_names_set, actual_names_set)
expected_bitmask = metadata.get_components_bitmask(
[
plugin_data_pb2.MeshPluginData.VERTEX,
plugin_data_pb2.MeshPluginData.FACE,
plugin_data_pb2.MeshPluginData.COLOR,
]
)
for event in events:
self.assertEqual(
expected_bitmask, self.get_metadata(event).components
)
def test_pb(self):
"""Tests ProtoBuf interface."""
name = "my_mesh"
tensor_data = test_utils.get_random_mesh(
100, add_faces=True, add_colors=True
)
config_dict = {"foo": 1}
proto = summary.mesh_pb(
name,
tensor_data.vertices,
faces=tensor_data.faces,
colors=tensor_data.colors,
config_dict=config_dict,
)
plugin_metadata = metadata.parse_plugin_metadata(
proto.value[0].metadata.plugin_data.content
)
self.assertEqual(
json.dumps(config_dict, sort_keys=True), plugin_metadata.json_config
)
class MeshSummaryV2GraphTest(MeshSummaryV2Test, tf.test.TestCase):
def write_mesh_event(self, *args, **kwargs):
kwargs.setdefault("step", 1)
# Hack to extract current scope since there's no direct API for it.
with tf.name_scope("_") as temp_scope:
scope = temp_scope.rstrip("/_")
@tf2.function
def graph_fn():
# Recreate the active scope inside the defun since it won't propagate.
with tf.name_scope(scope):
summary.mesh(*args, **kwargs)
writer = tf2.summary.create_file_writer(self.get_temp_dir())
with writer.as_default():
graph_fn()
writer.close()
if __name__ == "__main__":
tf.test.main()
| tensorflow/tensorboard | tensorboard/plugins/mesh/summary_v2_test.py | Python | apache-2.0 | 5,916 | 0.000676 |
from i18nurls.application import Application as make_app
| jackTheRipper/iotrussia | web_server/lib/werkzeug-master/examples/i18nurls/__init__.py | Python | gpl-2.0 | 57 | 0 |
"""
Tests for efflux.telemetry.endpoint
"""
import unittest
import mock
from efflux.telemetry.endpoint import Telemetry
class Dummy(Telemetry):
def _set_route(self):
self.base_route = None
class TelemetryTests(unittest.TestCase):
'''Tests for EffluxEndpoint'''
MOCKS = [
]
def setUp(self):
for target in self.MOCKS:
patcher = mock.patch(target)
patcher.start()
self.addCleanup(patcher.stop)
self.domain = 'boom.efflux.io'
self.token = 12345
self.efflux = Dummy(
self.domain,
self.token
)
def test_check_fields(self):
l1 = ['foo', 'bar', 'baz']
l2 = ['foo', 'bar', 'baz', 'bob']
l3 = ['foo', 'wut']
# subset OK
self.assertTrue(
self.efflux.check_required_fields(l1, l2)
)
# equal OK
self.assertTrue(
self.efflux.check_required_fields(l1, l1)
)
# not subset
self.assertFalse(
self.efflux.check_required_fields(l3, l2)
)
def test_set_namespace(self):
orig = {
'foo': 'bar',
'baz': 'ball'
}
check = {
'ns_foo': 'bar',
'ns_baz': 'ball'
}
self.assertEqual(
check,
self.efflux.set_namespace('ns', orig)
)
| effluxsystems/pyefflux | tests/unit/test_base.py | Python | mit | 1,403 | 0 |
import pytz
from pendulum import _safe_timezone
from pendulum.tz.timezone import Timezone
def test_safe_timezone_with_tzinfo_objects():
tz = _safe_timezone(pytz.timezone("Europe/Paris"))
assert isinstance(tz, Timezone)
assert "Europe/Paris" == tz.name
| sdispater/pendulum | tests/test_main.py | Python | mit | 268 | 0 |