text: string (lengths 4 to 1.02M) | meta: dict
import string, types, sys, os, StringIO, re, shlex, json, zipfile
from collections import OrderedDict
from django.contrib.auth.decorators import login_required
from django.core.servers.basehttp import FileWrapper
from django.http import HttpResponse, HttpResponseNotFound
from django.shortcuts import render_to_response, redirect, render
from django.template import RequestContext
from django.template.loader import render_to_string
from django.views.decorators.csrf import csrf_exempt
from lighthouse.forms.lapack_eprob import *
from lighthouse.models.lapack_eprob import *
import datetime
# build a dictionary of questions and answers from form data
def findAnsweredQuestions(answered_questions):
results = OrderedDict()
for key,values in answered_questions.items():
field_label, field_choices = eprob_fields[key]
for value in values:
for shortval, longval in field_choices:
if value == shortval:
answers = ()
if field_label in results:
answers = results[field_label]
answers = answers + (longval,)
results.update({field_label:answers})
return results
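# Illustrative sketch (hypothetical data, not the real eprob_fields):
# given eprob_fields = {'problem': ('Problem kind', (('sy', 'Symmetric'),
#                                                    ('he', 'Hermitian')))}
# and answered_questions = {'problem': ('sy', 'he')}, this returns
# OrderedDict([('Problem kind', ('Symmetric', 'Hermitian'))]).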
# clear session variable for advanced guided search
def clearAdvancedAnsweredQuestions(request):
# for key, _ in eprob_fields.items():
# label = 'eprob_form_' + key
# if label in request.session:
# del request.session[label]
if 'eprob_advanced_answered' in request.session:
del request.session['eprob_advanced_answered']
request.session['eprob_advanced_current_form'] = 'start'
# clear session variable for guided search answered questions
def clearAnsweredQuestions(request):
# for key, _ in eprob_fields.items():
# label = 'eprob_form_' + key
# if label in request.session:
# del request.session[label]
if 'eprob_guided_answered' in request.session:
del request.session['eprob_guided_answered']
request.session['eprob_guided_current_form'] = 'start'
# generate the context for the guided search tab
def lapack_eprob_guided_context(request):
if 'eprob_guided_current_form' in request.session:
formname = request.session['eprob_guided_current_form']
else:
formname = 'start'
if 'eprob_guided_answered' in request.session:
answered_temp = request.session['eprob_guided_answered']
else:
answered_temp = OrderedDict()
# if it was submitted and isn't a clear request
if request.method == 'POST':
if request.POST.get('guided') != 'clear':
if formname not in ('start', 'finish'):
# if it's not the first or last page, check the form
# uses GuidedForm instead of FilteredForm for performance
form = GuidedForm(formname,request.POST)
if form.is_valid():
answered_temp.update({formname : (form.cleaned_data[formname],)})
# if we are at the beginning, reset everything
elif formname == 'start':
clearAnsweredQuestions(request)
answered_temp = OrderedDict()
# find the results and set the context variable
results = getFilteredList(answered_temp)
context = {
'results' : results,
}
# find the page that should be shown next
nextform = findNextForm(results,answered_temp)
# update session variables
request.session['eprob_guided_current_form'] = nextform
request.session['eprob_guided_answered'] = answered_temp
answered = findAnsweredQuestions(answered_temp)
if nextform != 'finish':
# build a list of answered questions and update the context
context.update({'eprob_guided_answered' : answered,
'content_eprob_guided_form':'lighthouse/lapack_eprob/guided/questions.html',
'guided_form' : FilteredForm(nextform,results),
})
else:
context.update({'eprob_guided_answered' : answered,
'content_eprob_guided_form':'lighthouse/lapack_eprob/guided/finished.html',
})
# render the result to the page
return context
# generate the context for the advanced tab
def lapack_eprob_advanced_context(request):
# retrieve session variables
if 'eprob_advanced_current_form' in request.session:
formname = request.session['eprob_advanced_current_form']
else:
formname = 'start'
if 'eprob_advanced_answered' in request.session:
answered_temp = request.session['eprob_advanced_answered']
else:
answered_temp = OrderedDict()
# if request was a POST request
if request.method == 'POST':
# and it wasn't a clear request
if request.POST.get('advanced') != 'clear':
# if it's not the beginning or the end of search
if formname not in ('start','finish'):
# generate a form for that page and check validity
sform = AdvancedForm(formname,request.POST)
if sform.is_valid():
# if it's valid, check all the fields for that page
for fname in eprob_advanced_forms[formname]:
for answer in sform.cleaned_data.get(fname):
# for each answer, update the list of values
answers = ()
if fname in answered_temp:
answers = answered_temp[fname]
answers = answers + (answer,)
answered_temp.update({fname:answers})
# debugging - if the form ever isn't valid bad things have happened
#else:
# return HttpResponse(formname + ' ' + json.dumps(sform.errors))
# if the formname is set to start, start over
if formname == 'start':
clearAdvancedAnsweredQuestions(request)
answered_temp = OrderedDict()
# find all possible results
results = getFilteredList(answered_temp)
context = {
'results' : results,
}
# figure out which form is next
nextform = findNextFormAdvanced(results,answered_temp)
# get a list of answered questions
answered = findAnsweredQuestions(answered_temp)
# update session variables
request.session['eprob_advanced_current_form'] = nextform
request.session['eprob_advanced_answered'] = answered_temp
if nextform != 'finish':
# get a readable list of answered questions
#update the context
context.update({'eprob_advanced_answered' : answered,
'content_eprob_advanced_form':'lighthouse/lapack_eprob/advanced/questions.html',
'advanced_form' : AdvancedFilteredForm(nextform,results),
})
else:
context.update({'eprob_advanced_answered' : answered,
'content_eprob_advanced_form':'lighthouse/lapack_eprob/advanced/finished.html',
})
return context
# handle common setup and render the page
def lapack_eprob(request):
if 'eprob_selectedRoutines' not in request.session:
request.session['eprob_selectedRoutines'] = []
selectedRoutines = request.session['eprob_selectedRoutines']
selectedRoutineNames = ()
for item in request.session['eprob_selectedRoutines']:
if item['checkState'] == 'checked':
selectedRoutineNames = selectedRoutineNames + (item['thePrecision']+item['routineName'],)
if 'eprob_guided_answered' not in request.session:
request.session['eprob_guided_answered'] = {}
if 'eprob_advanced_answered' not in request.session:
request.session['eprob_advanced_answered'] = {}
if 'eprob_guided_current_form' not in request.session:
request.session['eprob_guided_current_form'] = 'start'
if 'eprob_advanced_current_form' not in request.session:
request.session['eprob_advanced_current_form'] = 'start'
if 'eprob_current_tab' not in request.session:
request.session['eprob_current_tab'] = 'guided'
context = {
# 'selectedRoutines': selectedRoutines,
'selectedRoutineNames' : selectedRoutineNames,
'content_eprob_keywordSearch' : ''
}
current_tab = ''
# if the page was a submission, handle that accordingly
if request.method == 'POST':
if "advanced" in request.POST:
#if POST was a clear request
if request.POST.get('advanced') == 'clear':
clearAdvancedAnsweredQuestions(request)
request.session['eprob_current_tab'] = 'advanced'
elif "keyword" in request.POST:
request.session['eprob_current_tab'] = 'keyword'
elif "guided" in request.POST:
#if POST was a clear request
if request.POST.get('guided') == 'clear':
clearAnsweredQuestions(request)
request.session['eprob_current_tab'] = 'guided'
# render the page with the current tab active
current_tab = request.session['eprob_current_tab']
if current_tab == 'advanced':
# advanced search
# clear guided search
clearAnsweredQuestions(request)
context.update(lapack_eprob_guided_context(request))
context.update(lapack_eprob_advanced_context(request))
return render_to_response(
'lighthouse/lapack_eprob/index.html',
{'AdvancedTab': True},
context_instance=RequestContext(request,context)
)
elif current_tab == 'keyword':
        # keyword search
# clear both guided and advanced search
clearAnsweredQuestions(request)
context.update(lapack_eprob_guided_context(request))
clearAdvancedAnsweredQuestions(request)
context.update(lapack_eprob_advanced_context(request))
return render_to_response(
'lighthouse/lapack_eprob/index.html',
{'KeywordTab': True},
context_instance=RequestContext(request,context)
)
else:
# guided search
# clear advanced search
clearAdvancedAnsweredQuestions(request)
context.update(lapack_eprob_advanced_context(request))
context.update(lapack_eprob_guided_context(request))
return render_to_response(
'lighthouse/lapack_eprob/index.html',
context_instance=RequestContext(request,context)
)
@csrf_exempt
def eprob_clear_session(request):
if request.is_ajax():
mode = request.POST.get('clear')
if mode == 'unchecked':
test = request.session['eprob_selectedRoutines']
request.session['eprob_selectedRoutines'] = []
for item in test:
if item['checkState'] == 'checked':
request.session['eprob_selectedRoutines'].append(item)
# Clear checked routines
elif mode == 'checked':
test = request.session['eprob_selectedRoutines']
request.session['eprob_selectedRoutines'] = []
for item in test:
if item['checkState'] == 'unchecked':
request.session['eprob_selectedRoutines'].append(item)
if mode == 'all':
request.session['eprob_selectedRoutines'] = []
return HttpResponse('cleared')
else:
return HttpResponse('only AJAX requests are allowed!')
@csrf_exempt
def eprob_update_session(request):
if request.is_ajax():
selectedRoutineNames = []
selectedRoutineList = [{
"thePrecision": request.POST.get('precision'),
"routineName": request.POST.get('routineName'),
"matrixType": request.POST.get('matrixType'),
"storageType": request.POST.get('storageType'),
"id": request.POST.get('idn'),
"url": request.POST.get('url'),
"checkState": request.POST.get('checkState')
}]
        # Check if the routine already exists in request.session['eprob_selectedRoutines']; if it does, save its index
counter = 0
match = -1
for item in request.session['eprob_selectedRoutines']:
if item['thePrecision'] == selectedRoutineList[0]['thePrecision'] and item['routineName'] == selectedRoutineList[0]['routineName']:
match = counter # Save the index
if selectedRoutineList[0]['checkState'] == 'checked':
request.session['eprob_selectedRoutines'][counter]['checkState'] = 'checked'
if selectedRoutineList[0]['checkState'] == 'unchecked':
request.session['eprob_selectedRoutines'][counter]['checkState'] = 'unchecked'
counter += 1
        if match == -1: # The routine does not exist in request.session['eprob_selectedRoutines'], so add it
request.session['eprob_selectedRoutines'] = request.session['eprob_selectedRoutines'] + selectedRoutineList
# Session was modified
request.session.modified = True
# Create a list of all checked routines
for item in request.session['eprob_selectedRoutines']:
if item['checkState'] == 'checked':
selectedRoutineNames.append(item['thePrecision']+item['routineName'] + ",")
# Return the list
return HttpResponse(selectedRoutineNames)
else:
return HttpResponse('only AJAX requests are allowed!') | {
"content_hash": "4aa29c9662493449db25c4664fded229",
"timestamp": "",
"source": "github",
"line_count": 346,
"max_line_length": 143,
"avg_line_length": 39.62138728323699,
"alnum_prop": 0.6168210664526953,
"repo_name": "LighthouseHPC/lighthouse",
"id": "fdf45681405514a28a999b9def306981cdc614fd",
"size": "13709",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Dlighthouse/lighthouse/views/lapack_eprob.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
import numpy as np
def select(table, predicate, label):
col = table.data.ca[label]
nchunks = col.nchunks
query = np.vectorize(predicate)
qs = list()
for nchunk in range(nchunks):
qi = list(query(col.chunks[nchunk][:]))
qs.append(qi)
return qs
def select2(table, predicate, labels):
cols = [table.data.ca[label] for label in labels]
nchunks = cols[0].nchunks
query = np.vectorize(predicate)
qs = list()
for nchunk in range(nchunks):
col1 = cols[0].chunks[nchunk][:]
col2 = cols[1].chunks[nchunk][:]
qi = list(query(col1, col2))
qs.append(qi)
return qs
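# Usage sketch, assuming `table.data.ca` holds bcolz-style chunked columns;
# the table and column names below are hypothetical:
#   adults = select(people, lambda age: age > 18, 'age')
#   flagged = select2(people, lambda age, score: age > 18 and score < 0.5,
#                     ['age', 'score'])
# Each call returns one list of predicate results per chunk, in chunk order.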
| {
"content_hash": "4aac6fde6a972839585d494f1c360544",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 53,
"avg_line_length": 22.586206896551722,
"alnum_prop": 0.6030534351145038,
"repo_name": "seibert/blaze-core",
"id": "11ea9d10ff5ada939be992ad70fbb0340a54df90",
"size": "655",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blaze/algo/select.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "113059"
},
{
"name": "JavaScript",
"bytes": "64593"
},
{
"name": "Perl",
"bytes": "52"
},
{
"name": "Python",
"bytes": "876056"
},
{
"name": "Shell",
"bytes": "5109"
}
],
"symlink_target": ""
} |
from django.conf.urls import url
from rest_framework.routers import DefaultRouter
from api import views as api_views
router = DefaultRouter(trailing_slash=False)
router.register('todos', api_views.TodoViewSet)
urlpatterns = router.urls
from django.shortcuts import redirect
urlpatterns += (
url('todos/', lambda r: redirect('/api/todos', permanent=True)),
)
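# Resulting routes (sketch): with trailing_slash=False the router serves
# 'todos' and 'todos/<pk>', while the extra url() pattern permanently
# (301) redirects matching 'todos/' requests to '/api/todos'.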
| {
"content_hash": "0c30564f1b5d069e63a6430a0f387ea1",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 68,
"avg_line_length": 31.916666666666668,
"alnum_prop": 0.7754569190600522,
"repo_name": "dgouldin/djangocon-eu-2015",
"id": "0942074aa665c38c4cbfc2ce782932bc8371abaf",
"size": "383",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "djapi/api/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "329981"
},
{
"name": "HTML",
"bytes": "56492"
},
{
"name": "JavaScript",
"bytes": "213880"
},
{
"name": "Python",
"bytes": "9995"
}
],
"symlink_target": ""
} |
import os
import sys
import time
import errno
import select
import logging
import yaml
import argparse
import paramiko
def init_logger():
log = logging.getLogger('cluster_install')
log.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
return log
logger = init_logger()
EXISTING_VMS_PATH = 'existing_vms.yaml'
JUMP_HOST_DIR = '/home/{0}/cluster_install'
JUMP_HOST_SSH_KEY_PATH = '/home/{0}/.ssh/jump_host_key'
JUMP_HOST_ENV_IDS = JUMP_HOST_DIR + '/environment_ids.yaml'
JUMP_HOST_CONFIG_PATH = JUMP_HOST_DIR + '/config_env.yaml'
JUMP_HOST_LICENSE_PATH = JUMP_HOST_DIR + '/cloudify_license.yaml'
JUMP_HOST_PARSED_FLAGS_PATH = JUMP_HOST_DIR + '/parsed_flags.yaml'
JUMP_HOST_INSTALL_PATH = JUMP_HOST_DIR + '/install_from_jump_host.py'
def retry_with_sleep(func, *func_args, **kwargs):
retry_count = kwargs.get('retry_count', 15)
delay = kwargs.get('delay', 2)
for i in range(retry_count):
try:
return func(*func_args)
except Exception as e:
if i < retry_count - 1:
time.sleep(delay)
continue
else:
raise e
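# Usage sketch (hypothetical callable and argument): retry a flaky call up to
# 5 times, sleeping 3 seconds between attempts; positional args are forwarded.
#   status = retry_with_sleep(fetch_status, vm_name, retry_count=5, delay=3)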
def _create_ssh_client_func(hostname, username, key_file):
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(hostname=hostname, username=username, key_filename=key_file)
return client
def _create_ssh_client(hostname, username, key_file):
return retry_with_sleep(_create_ssh_client_func, hostname, username,
key_file) # waiting for the VM to run
def _blocking_exec_command(client, command):
# one channel per command
stdin, stdout, stderr = client.exec_command(command)
# get the shared channel for stdout/stderr/stdin
channel = stdout.channel
output = ''
# we do not need stdin.
stdin.close()
# indicate that we're not going to write to that channel anymore
channel.shutdown_write()
# read stdout/stderr in order to prevent read block hangs
print(stdout.channel.recv(len(stdout.channel.in_buffer)))
# chunked read to prevent stalls
while not channel.closed or channel.recv_ready() or \
channel.recv_stderr_ready():
# stop if channel was closed prematurely, and there is no data in the
# buffers.
got_chunk = False
readq, _, _ = select.select([stdout.channel], [], [], 180)
for c in readq:
if c.recv_ready():
output = stdout.channel.recv(len(c.in_buffer))
print(output)
got_chunk = True
if c.recv_stderr_ready():
# make sure to read stderr to prevent stall
output = stderr.channel.recv_stderr(len(c.in_stderr_buffer))
got_chunk = True
if not got_chunk \
and stdout.channel.exit_status_ready() \
and not stderr.channel.recv_stderr_ready() \
and not stdout.channel.recv_ready():
# indicate that we're not going to read from this channel anymore
stdout.channel.shutdown_read()
# close the channel
stdout.channel.close()
            break # exit as remote side is finished and our buffers are empty
# close all the pseudofiles
stdout.close()
stderr.close()
exit_status = stdout.channel.recv_exit_status()
if exit_status != 0:
raise Exception(output)
return output
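# Usage sketch: run a command over an existing paramiko client; raises an
# Exception with the captured output if the remote exit status is non-zero.
#   out = _blocking_exec_command(client, 'echo hello')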
def close_clients_connection(clients_list):
for client in clients_list:
client.close()
class VM(object):
def __init__(self, private_ip, public_ip, name, key_path, vm_username):
self.private_ip = private_ip
self.public_ip = public_ip
self.name = name
self.key_path = key_path
self.username = vm_username
self.client = _create_ssh_client(self.public_ip, self.username,
self.key_path)
def exec_command(self, command):
logger.debug('Running `{0}` on {1}'.format(command, self.name))
return _blocking_exec_command(self.client, command)
def _is_cluster_instance(self):
cluster_instances = ['postgresql', 'rabbitmq', 'manager']
for instance in cluster_instances:
if instance in self.name:
return True
return False
def get_node_id(self):
if not self._is_cluster_instance():
return
stdout = self.exec_command('cfy_manager node get-id')
        return stdout[16:52]  # extract the 36-character node id from the command output
def scp_local_to_remote(instance, source_path, destination_path):
os.system('scp -i {key_path} -o StrictHostKeyChecking=no -r '
'{source_path} {username}@{public_ip}:{destination_path}'.
format(key_path=instance.key_path, source_path=source_path,
username=instance.username, public_ip=instance.public_ip,
destination_path=destination_path))
def scp_remote_to_local(instance, source_path, destination_path):
os.system('scp -i {key_path} -o StrictHostKeyChecking=no -r '
'{username}@{public_ip}:{source_path} {destination_path}'.
format(key_path=instance.key_path, source_path=source_path,
username=instance.username, public_ip=instance.public_ip,
destination_path=destination_path))
def parse_command():
parser = argparse.ArgumentParser(description='Installing an Active-Active '
'manager cluster')
parser.add_argument('--config-path', action='store', type=str,
required=True, help='The config_env.yaml file path')
parser.add_argument('--clean-on-failure', dest='clean',
action='store_true', default=False,
help='Pass this flag if you want to clean your '
'Openstack environment on failure.\n'
'In the case of using existing VMs, the '
'environment won\'t be deleted.')
return parser.parse_args()
def get_dict_from_yaml(yaml_path):
with open(yaml_path) as f:
yaml_dict = yaml.load(f, yaml.Loader)
return yaml_dict
def silent_remove(filename):
try:
os.remove(filename)
except OSError as e:
if e.errno != errno.ENOENT: # errno.ENOENT = no such file or directory
raise # re-raise exception if a different error occurred
| {
"content_hash": "4b8783a8197ab8717ab80efcff9a209a",
"timestamp": "",
"source": "github",
"line_count": 189,
"max_line_length": 79,
"avg_line_length": 35.48148148148148,
"alnum_prop": 0.6128839844915002,
"repo_name": "cloudify-cosmo/cloudify-dev",
"id": "bbf03331c4191a654258b458f51b92a27c2044f2",
"size": "6706",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/cluster-install/common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HCL",
"bytes": "7643"
},
{
"name": "HTML",
"bytes": "4402"
},
{
"name": "Python",
"bytes": "97431"
},
{
"name": "Ruby",
"bytes": "3483"
},
{
"name": "Shell",
"bytes": "28515"
}
],
"symlink_target": ""
} |
from __future__ import print_function, division
import sys
sys._running_pytest = True
import pytest
from sympy.core.cache import clear_cache
def pytest_report_header(config):
from sympy.utilities.misc import ARCH
s = "architecture: %s\n" % ARCH
from sympy.core.cache import USE_CACHE
s += "cache: %s\n" % USE_CACHE
from sympy.core.compatibility import GROUND_TYPES, HAS_GMPY
version = ''
    if GROUND_TYPES == 'gmpy':
if HAS_GMPY == 1:
import gmpy
elif HAS_GMPY == 2:
import gmpy2 as gmpy
version = gmpy.version()
s += "ground types: %s %s\n" % (GROUND_TYPES, version)
return s
def pytest_addoption(parser):
parser.addoption("--slow", dest="runslow", action="store_true",
help="allow slow tests to run")
def pytest_configure(config):
# register an additional marker
config.addinivalue_line("markers", "slow: slow test")
def pytest_runtest_setup(item):
if not isinstance(item, pytest.Function):
return
if not item.config.getvalue("runslow") and hasattr(item.obj, 'slow'):
pytest.skip("slow test: pass --slow to run")
def pytest_terminal_summary(terminalreporter):
if (terminalreporter.stats.get('error', None) or
terminalreporter.stats.get('failed', None)):
terminalreporter.write_sep(
' ', 'DO *NOT* COMMIT!', red=True, bold=True)
def pytest_runtest_teardown():
clear_cache()
| {
"content_hash": "85b02d15d4941495df2119ed52ea794e",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 73,
"avg_line_length": 28.096153846153847,
"alnum_prop": 0.6447638603696099,
"repo_name": "kmacinnis/sympy",
"id": "06bf053dc5204fb786599160bfbad8fdf5a499dd",
"size": "1461",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "sympy/conftest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "13573973"
},
{
"name": "Ruby",
"bytes": "304"
},
{
"name": "Scheme",
"bytes": "125"
},
{
"name": "Shell",
"bytes": "1284"
},
{
"name": "TeX",
"bytes": "8790"
},
{
"name": "XSLT",
"bytes": "366202"
}
],
"symlink_target": ""
} |
from typing import Any, Dict, List, Optional, Sequence, Union
from airflow.models import BaseOperator
from airflow.utils.context import Context
from airflow.utils.email import send_email
class EmailOperator(BaseOperator):
"""
Sends an email.
:param to: list of emails to send the email to. (templated)
:param subject: subject line for the email. (templated)
:param html_content: content of the email, html markup
is allowed. (templated)
:param files: file names to attach in email (templated)
:param cc: list of recipients to be added in CC field
:param bcc: list of recipients to be added in BCC field
:param mime_subtype: MIME sub content type
:param mime_charset: character set parameter added to the Content-Type
header.
    :param conn_id: connection to use when sending the email.
    :param custom_headers: additional headers to add to the MIME message.
"""
template_fields: Sequence[str] = ('to', 'subject', 'html_content', 'files')
template_fields_renderers = {"html_content": "html"}
template_ext: Sequence[str] = ('.html',)
ui_color = '#e6faf9'
def __init__(
self,
*,
to: Union[List[str], str],
subject: str,
html_content: str,
files: Optional[List] = None,
cc: Optional[Union[List[str], str]] = None,
bcc: Optional[Union[List[str], str]] = None,
mime_subtype: str = 'mixed',
mime_charset: str = 'utf-8',
conn_id: Optional[str] = None,
custom_headers: Optional[Dict[str, Any]] = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.to = to
self.subject = subject
self.html_content = html_content
self.files = files or []
self.cc = cc
self.bcc = bcc
self.mime_subtype = mime_subtype
self.mime_charset = mime_charset
self.conn_id = conn_id
self.custom_headers = custom_headers
def execute(self, context: Context):
send_email(
self.to,
self.subject,
self.html_content,
files=self.files,
cc=self.cc,
bcc=self.bcc,
mime_subtype=self.mime_subtype,
mime_charset=self.mime_charset,
conn_id=self.conn_id,
custom_headers=self.custom_headers,
)
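# Usage sketch inside a DAG definition (task id and addresses are
# placeholders, not values defined in this module):
#   notify = EmailOperator(
#       task_id='notify_team',
#       to=['team@example.com'],
#       subject='Pipeline finished',
#       html_content='<p>All tasks completed.</p>',
#   )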
| {
"content_hash": "307dfbc04ccf9cf70eab78704cb52c04",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 79,
"avg_line_length": 33.507246376811594,
"alnum_prop": 0.5994809688581315,
"repo_name": "lyft/incubator-airflow",
"id": "220bafa944b12d4a506753dfd6552499ee2ef9c6",
"size": "3099",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "airflow/operators/email.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13715"
},
{
"name": "Dockerfile",
"bytes": "17280"
},
{
"name": "HTML",
"bytes": "161328"
},
{
"name": "JavaScript",
"bytes": "25360"
},
{
"name": "Jinja",
"bytes": "8565"
},
{
"name": "Jupyter Notebook",
"bytes": "2933"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "10019710"
},
{
"name": "Shell",
"bytes": "220780"
}
],
"symlink_target": ""
} |
"""
test_distancetable
----------------------------------
Tests for `distancetable` module.
"""
import unittest
from smartrcs.scanner.distancetable import DistanceTable
class TestDistanceTable(unittest.TestCase):
def test___init__(self):
        # Create a valid, common dt and get the internal table size
dt = DistanceTable(54)
size = len(dt._DistanceTable__table)
self.assertEqual(size, 54, 'Invalid distance table size')
# Create an invalid dt (with 0 size)
with self.assertRaises(ValueError):
DistanceTable(0)
# Create an invalid dt (with negative size)
with self.assertRaises(ValueError):
DistanceTable(-1)
def test_add(self):
# Create a distance table and add a distance
dt = DistanceTable(54)
dt.add(4, 8, 11.5)
# Test distance from A to B, and from B to A
self.assertEqual(dt._DistanceTable__table[4, 8], 11.5, 'Distance not added to table')
self.assertEqual(dt._DistanceTable__table[8, 4], 11.5, 'Distance from B to A not added to table')
def test_get(self):
# Create a distance table and add a distance
dt = DistanceTable(54)
dt.add(4, 8, 11.5)
# Test distance from A to B, and from B to A (with get())
self.assertEqual(dt.get(4, 8), 11.5, 'Invalid distance value')
self.assertEqual(dt.get(8, 4), 11.5, 'Invalid distance value')
def test_nearest(self):
# Create a distance table and add some distances
dt = DistanceTable(54)
dt.add(2, 3, 14.21)
dt.add(2, 7, 11.42)
dt.add(2, 10, 17.22)
dt.add(2, 5, 13.71)
dt.add(1, 10, 10.10)
# Test for nearest location
self.assertEqual(dt.nearest(2), 7, 'Invalid nearest location')
def test___str__(self):
# Create a distance table and add some distances
dt = DistanceTable(2)
dt.add(0, 0, 0.0)
dt.add(0, 1, 14.21)
dt.add(1, 0, 14.21)
dt.add(1, 1, 0.0)
# Test if string representation is done correctly
self.assertEqual(str(dt), ' 0 14 \n 14 0 \n')
# Create an incomplete distance table
dt = DistanceTable(2)
dt.add(0, 1, 14.21)
dt.add(1, 0, 14.21)
# Test for representation
self.assertEqual(str(dt), 'xxx 14 \n 14 xxx \n')
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
| {
"content_hash": "dc45040a635e9574403e1bdfbd84f471",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 105,
"avg_line_length": 28.788235294117648,
"alnum_prop": 0.5823457294646506,
"repo_name": "ottoszika/smartrcs",
"id": "51521b7a6e19371d0034b59f49337633560859bb",
"size": "2494",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tests/scanner/test_distancetable.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "701"
},
{
"name": "HTML",
"bytes": "8548"
},
{
"name": "JavaScript",
"bytes": "9123"
},
{
"name": "Makefile",
"bytes": "2082"
},
{
"name": "Python",
"bytes": "270744"
}
],
"symlink_target": ""
} |
import sys
from typing import Any
from google.api_core.extended_operation import ExtendedOperation
from google.cloud import compute_v1
def wait_for_extended_operation(
operation: ExtendedOperation, verbose_name: str = "operation", timeout: int = 300
) -> Any:
"""
This method will wait for the extended (long-running) operation to
complete. If the operation is successful, it will return its result.
If the operation ends with an error, an exception will be raised.
If there were any warnings during the execution of the operation
they will be printed to sys.stderr.
Args:
operation: a long-running operation you want to wait on.
verbose_name: (optional) a more verbose name of the operation,
used only during error and warning reporting.
timeout: how long (in seconds) to wait for operation to finish.
If None, wait indefinitely.
Returns:
Whatever the operation.result() returns.
Raises:
This method will raise the exception received from `operation.exception()`
or RuntimeError if there is no exception set, but there is an `error_code`
set for the `operation`.
In case of an operation taking longer than `timeout` seconds to complete,
a `concurrent.futures.TimeoutError` will be raised.
"""
result = operation.result(timeout=timeout)
if operation.error_code:
print(
f"Error during {verbose_name}: [Code: {operation.error_code}]: {operation.error_message}",
file=sys.stderr,
flush=True,
)
print(f"Operation ID: {operation.name}", file=sys.stderr, flush=True)
raise operation.exception() or RuntimeError(operation.error_message)
if operation.warnings:
print(f"Warnings during {verbose_name}:\n", file=sys.stderr, flush=True)
for warning in operation.warnings:
print(f" - {warning.code}: {warning.message}", file=sys.stderr, flush=True)
return result
def create_template(project_id: str, template_name: str) -> compute_v1.InstanceTemplate:
"""
Create a new instance template with the provided name and a specific
instance configuration.
Args:
project_id: project ID or project number of the Cloud project you use.
template_name: name of the new template to create.
Returns:
InstanceTemplate object that represents the new instance template.
"""
# The template describes the size and source image of the boot disk
# to attach to the instance.
disk = compute_v1.AttachedDisk()
initialize_params = compute_v1.AttachedDiskInitializeParams()
initialize_params.source_image = (
"projects/debian-cloud/global/images/family/debian-11"
)
initialize_params.disk_size_gb = 250
disk.initialize_params = initialize_params
disk.auto_delete = True
disk.boot = True
# The template connects the instance to the `default` network,
# without specifying a subnetwork.
network_interface = compute_v1.NetworkInterface()
network_interface.name = "global/networks/default"
# The template lets the instance use an external IP address.
access_config = compute_v1.AccessConfig()
access_config.name = "External NAT"
access_config.type_ = "ONE_TO_ONE_NAT"
access_config.network_tier = "PREMIUM"
network_interface.access_configs = [access_config]
template = compute_v1.InstanceTemplate()
template.name = template_name
template.properties.disks = [disk]
template.properties.machine_type = "e2-standard-4"
template.properties.network_interfaces = [network_interface]
template_client = compute_v1.InstanceTemplatesClient()
operation = template_client.insert(
project=project_id, instance_template_resource=template
)
wait_for_extended_operation(operation, "instance template creation")
return template_client.get(project=project_id, instance_template=template_name)
# [END compute_template_create]
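# Example invocation (sketch; both identifiers are placeholders):
#   create_template("my-project-id", "example-instance-template")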
| {
"content_hash": "af67d6f9301c107cb2da95497d55d48b",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 102,
"avg_line_length": 37.598130841121495,
"alnum_prop": 0.6982351478995774,
"repo_name": "googleapis/python-compute",
"id": "1041206c9144ef0c80e947b7099e44bf456dfd52",
"size": "4854",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "samples/snippets/instance_templates/create.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "32681847"
},
{
"name": "Shell",
"bytes": "30663"
}
],
"symlink_target": ""
} |
from tests import unittest
from twisted.internet import defer
from synapse.storage.profile import ProfileStore
from synapse.types import UserID
from tests.utils import setup_test_homeserver
class ProfileStoreTestCase(unittest.TestCase):
@defer.inlineCallbacks
def setUp(self):
hs = yield setup_test_homeserver()
self.store = ProfileStore(hs)
self.u_frank = UserID.from_string("@frank:test")
@defer.inlineCallbacks
def test_displayname(self):
yield self.store.create_profile(
self.u_frank.localpart
)
yield self.store.set_profile_displayname(
self.u_frank.localpart, "Frank"
)
self.assertEquals(
"Frank",
(yield self.store.get_profile_displayname(self.u_frank.localpart))
)
@defer.inlineCallbacks
def test_avatar_url(self):
yield self.store.create_profile(
self.u_frank.localpart
)
yield self.store.set_profile_avatar_url(
self.u_frank.localpart, "http://my.site/here"
)
self.assertEquals(
"http://my.site/here",
(yield self.store.get_profile_avatar_url(self.u_frank.localpart))
)
| {
"content_hash": "c2b2be0d8cbd988e2d2112192625cf38",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 78,
"avg_line_length": 25.770833333333332,
"alnum_prop": 0.6273241713823767,
"repo_name": "howethomas/synapse",
"id": "1fa783f313ede08af3b01251c41497701f9d6264",
"size": "1841",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/storage/test_profile.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1020"
},
{
"name": "HTML",
"bytes": "1223"
},
{
"name": "JavaScript",
"bytes": "172643"
},
{
"name": "Perl",
"bytes": "31842"
},
{
"name": "Python",
"bytes": "1571632"
},
{
"name": "Shell",
"bytes": "3281"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion as deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('blog', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('author', models.CharField(max_length=200)),
('text', models.TextField()),
('created_date', models.DateTimeField(default=django.utils.timezone.now)),
('approved_comment', models.BooleanField(default=False)),
('post', models.ForeignKey(on_delete=deletion.CASCADE, related_name='comments', to='blog.Post')),
],
),
]
| {
"content_hash": "8d49ae34e9694587d01c93aea256052a",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 114,
"avg_line_length": 34.38461538461539,
"alnum_prop": 0.6006711409395973,
"repo_name": "kpi-web-guild/django-girls-blog-pavlenk0",
"id": "31216b0b6a19f6df8d1296f1add58accafa475fd",
"size": "964",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blog/migrations/0002_comment.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "877"
},
{
"name": "HTML",
"bytes": "5080"
},
{
"name": "Python",
"bytes": "12368"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import frappe
import unittest
import requests
from frappe.utils import get_site_url
scripts = [
dict(
name='test_todo',
script_type = 'DocType Event',
doctype_event = 'Before Insert',
reference_doctype = 'ToDo',
script = '''
if "test" in doc.description:
doc.status = 'Closed'
'''
),
dict(
name='test_todo_validate',
script_type = 'DocType Event',
doctype_event = 'Before Insert',
reference_doctype = 'ToDo',
script = '''
if "validate" in doc.description:
raise frappe.ValidationError
'''
),
dict(
name='test_api',
script_type = 'API',
api_method = 'test_server_script',
allow_guest = 1,
script = '''
frappe.response['message'] = 'hello'
'''
),
dict(
name='test_return_value',
script_type = 'API',
api_method = 'test_return_value',
allow_guest = 1,
script = '''
frappe.flags = 'hello'
'''
),
dict(
name='test_permission_query',
script_type = 'Permission Query',
reference_doctype = 'ToDo',
script = '''
conditions = '1 = 1'
'''),
dict(
name='test_invalid_namespace_method',
script_type = 'DocType Event',
doctype_event = 'Before Insert',
reference_doctype = 'Note',
script = '''
frappe.method_that_doesnt_exist("do some magic")
'''
)
]
class TestServerScript(unittest.TestCase):
@classmethod
def setUpClass(cls):
frappe.db.commit()
frappe.db.sql('truncate `tabServer Script`')
frappe.get_doc('User', 'Administrator').add_roles('Script Manager')
for script in scripts:
script_doc = frappe.get_doc(doctype ='Server Script')
script_doc.update(script)
script_doc.insert()
frappe.db.commit()
@classmethod
def tearDownClass(cls):
frappe.db.commit()
frappe.db.sql('truncate `tabServer Script`')
frappe.cache().delete_value('server_script_map')
def setUp(self):
frappe.cache().delete_value('server_script_map')
def test_doctype_event(self):
todo = frappe.get_doc(dict(doctype='ToDo', description='hello')).insert()
self.assertEqual(todo.status, 'Open')
todo = frappe.get_doc(dict(doctype='ToDo', description='test todo')).insert()
self.assertEqual(todo.status, 'Closed')
self.assertRaises(frappe.ValidationError, frappe.get_doc(dict(doctype='ToDo', description='validate me')).insert)
def test_api(self):
response = requests.post(get_site_url(frappe.local.site) + "/api/method/test_server_script")
self.assertEqual(response.status_code, 200)
self.assertEqual("hello", response.json()["message"])
def test_api_return(self):
self.assertEqual(frappe.get_doc('Server Script', 'test_return_value').execute_method(), 'hello')
def test_permission_query(self):
self.assertTrue('where (1 = 1)' in frappe.db.get_list('ToDo', return_query=1))
self.assertTrue(isinstance(frappe.db.get_list('ToDo'), list))
def test_attribute_error(self):
"""Raise AttributeError if method not found in Namespace"""
note = frappe.get_doc({"doctype": "Note", "title": "Test Note: Server Script"})
self.assertRaises(AttributeError, note.insert)
| {
"content_hash": "dccce1cd55ee76b22ff76aa458aa2a05",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 115,
"avg_line_length": 27.21818181818182,
"alnum_prop": 0.6890447561790247,
"repo_name": "saurabh6790/frappe",
"id": "aac8b3deed8719531c396b71e345075c2b8e9210",
"size": "3095",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "frappe/core/doctype/server_script/test_server_script.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "63276"
},
{
"name": "HTML",
"bytes": "218921"
},
{
"name": "JavaScript",
"bytes": "2152738"
},
{
"name": "Less",
"bytes": "36947"
},
{
"name": "Makefile",
"bytes": "99"
},
{
"name": "Python",
"bytes": "3261616"
},
{
"name": "SCSS",
"bytes": "223084"
},
{
"name": "Shell",
"bytes": "3358"
},
{
"name": "Vue",
"bytes": "49860"
}
],
"symlink_target": ""
} |
import sys
print('%', ' '.join(sys.argv))
for line in sys.stdin:
sys.stdout.write(line)
| {
"content_hash": "4e25a494baa2fecba9ff68ac73460b31",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 30,
"avg_line_length": 23,
"alnum_prop": 0.6521739130434783,
"repo_name": "Lemma1/MAC-POSTS",
"id": "2a23d95fc20b81014bfb36fdc4c707f53e129da1",
"size": "114",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "doc_builder/sphinx-contrib/sadisplay/tests/fakecmd.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "3394"
},
{
"name": "Batchfile",
"bytes": "103388"
},
{
"name": "C",
"bytes": "5399"
},
{
"name": "C++",
"bytes": "3595985"
},
{
"name": "CMake",
"bytes": "53433"
},
{
"name": "CSS",
"bytes": "3618"
},
{
"name": "HTML",
"bytes": "18640"
},
{
"name": "JavaScript",
"bytes": "44610"
},
{
"name": "Jupyter Notebook",
"bytes": "7469541"
},
{
"name": "MATLAB",
"bytes": "5439"
},
{
"name": "Makefile",
"bytes": "148059"
},
{
"name": "Python",
"bytes": "1950140"
},
{
"name": "Shell",
"bytes": "2554"
}
],
"symlink_target": ""
} |
"""Utilities to automate running tests under app verifier."""
import os
import os.path
import re
import subprocess
import sys
import tempfile
import win32com.client.gencache as gencache
import xml.sax
import xml.sax.handler
TRACE_RE_ = re.compile('(?P<module>\w+)' # Module
'(?:'
'!' # Separator
'(?P<symbol>[^\+]+)' # Symbol
'(?:\+(?P<offset>[0-9a-f]+))?' # Optional hex offset
')?' # Sometimes we only get a module
'(?:\s+\(' # Optional file and line
'(?P<file>[^@]*)' # file
'\s+@\s*'
'(?P<line>\d+)' # line
'\))?'
, re.I)
# This is application verifier's automation library id.
LIBID_AppVerifier = '{DAB52BCB-6990-464A-AC61-F60C8EF60E24}'
try:
VerifierLib = gencache.EnsureModule(LIBID_AppVerifier, 0, 1, 0)
except: # pylint: disable=W0702
VerifierLib = None
class LogTrace:
def __init__(self, text):
d = TRACE_RE_.match(text).groupdict()
self.module = d['module']
self.symbol = d['symbol']
self.offset = d['offset']
self.file = d['file']
self.line = d['line']
def __str__(self):
ret = ''
if self.file and self.line:
ret = '%s (%s): AppVerifier warning C1: ' % (self.file, self.line)
ret = '%s%s!%s' % (ret, self.module, self.symbol)
if self.offset:
ret = "%s+%s" % (ret, self.offset)
return ret
class LogEntry:
def __init__(self, stopcode, layer, severity):
self.stopcode = int(stopcode, 0)
self.layer = layer
self.severity = severity
self.message = ''
self.trace = []
def __str__(self):
return "%s(%s, %s): %s\n\t%s" % (self.severity,
self.layer, self.stopcode, self.message,
'\n\t'.join(map(str,self.trace)))
class VerifierSaxHandler(xml.sax.handler.ContentHandler):
'''Sax event handler to parse the verifier log file.
The XML generated by verifier appears not to be standards compliant, so
e.g. xml.dom.minidom falls down while parsing it, hence this tedious
implementation.
'''
def __init__(self):
xml.sax.handler.ContentHandler.__init__(self)
self.log_entry = None
self.stack = []
self.element = None
self.errors = 0
self.text = ''
self.log_entries = []
def startElement(self, name, attrs):
self.stack.append((self.element, self.text))
self.element = name
self.text = []
if name == 'avrf:logEntry':
self.log_entry = LogEntry(attrs.getValue('StopCode'),
attrs.getValue('LayerName'),
attrs.getValue('Severity'))
self.errors += 1
def endElement(self, name):
if name == 'avrf:logEntry':
self.log_entries.append(self.log_entry)
self.log_entry = None
elif name == 'avrf:message':
self.log_entry.message = ''.join(self.text)
elif name == 'avrf:trace':
self.log_entry.trace.append(LogTrace(''.join(self.text)))
(self.element, self.text) = self.stack.pop()
def characters(self, content):
self.text.append(content)
class AppverifierTestRunner:
'''Encapsulates logic to run an executable under app verifier.
Interacts with application verifier's automation library to set default
verifier settings for a given target, process logs etc.
'''
# Checks we want enabled.
default_checks_ = [
# Basics group
"Core",
"Exceptions",
"Handles",
"Heaps",
"InputOutput",
"Leak",
"Locks",
"Memory",
"SRWLock",
"Threadpool",
"TLS",
# Misc group
"DangerousAPIs",
"DirtyStacks",
"TimeRollOver",
]
def __init__(self, break_on_stop):
self.manager = VerifierLib.AppVerifierManager()
self.break_on_stop = break_on_stop
def SetStopBreaks(self, check):
'''Configures all the stops for check to log only.
Arguments:
check: an app verifier check
'''
error_reporting = VerifierLib.constants.VerifierErrorReportingNoBreak
error_flags = (VerifierLib.constants.VerifierErrorFlagLogToFile |
VerifierLib.constants.VerifierErrorFlagLogStackTrace)
if self.break_on_stop:
error_reporting = VerifierLib.constants.VerifierErrorReportingBreakpoint
try:
for stop in check.Stops:
stop.ErrorReporting = error_reporting
stop.ErrorFlags = error_flags
except: # pylint: disable=W0702
# Accessing or enumerating Stops fails for some checks.
print 'Exception setting options for check', check.Name
def ResetImage(self, image_name):
'''Removes all verifier settings for image_name.
Arguments:
      image_name: base name of the image, e.g. "relink.exe"
'''
# Reset the settings for our image
try:
self.manager.Images.Remove(image_name)
except: # pylint: disable=W0702
# this fails if verifier had no settings for the image
pass
def SetImageDefaults(self, image_name, disabled_checks=[]):
'''Configures a default set of tests for image_name
Arguments:
image_name: the basename of a test, e.g. 'common_unittest.exe'
disabled_checks: A list of checks to disable, by top level category
name.
'''
self.ResetImage(image_name)
image = self.manager.Images.Add(image_name)
for check in image.Checks:
if (check.Name in self.default_checks_ and
check.Name not in disabled_checks):
check.Enabled = True
self.SetStopBreaks(check)
def ClearImageLogs(self, image_name):
'''Deletes all app verifier logs for image_name.
Arguments:
      image_name: base name of the image, e.g. "relink.exe"
'''
logs = self.manager.Logs(image_name)
if logs:
while logs.Count:
logs.Remove(0)
@staticmethod
def _ProcessLog(log_path):
'''Process the verifier log at log_path.
Arguments:
log_path: a full path to the log file.
Returns: a list of the log entries in the log.
'''
handler = VerifierSaxHandler()
xml.sax.parse(open(log_path, 'rb'), handler)
return handler.log_entries
@staticmethod
def _SaveLog(log):
'''Saves log to an XML file and returns the path to the file.
Arguments:
log: an appverifier log instance.
Returns: the full path to the temp file containing the resultant log file.
'''
(fd, path) = tempfile.mkstemp('.xml', 'verifier_log')
os.close(fd)
try:
log.SaveAsXML(path, '')
except: # pylint: disable=W0702
os.remove(path)
return None
return path
def ProcessLogs(self, test_name):
'''Processes all logs for image test_name.
Arguments:
test_name: the base name of the test executable.
Returns: A list of LogEntry instances for each error logged.
'''
logs = self.manager.Logs(test_name)
if not logs or not logs.Count:
      return []
errors = []
for log in logs:
path = self._SaveLog(log)
if path:
errors.extend(self._ProcessLog(path))
os.remove(path)
return errors
def RunTestWithVerifier(self, test_path):
'''Run a single test under verifier.
Arguments:
test_path: full or relative path to the test to run.
Returns:
A tuple with the test exit code and a list of verifier errors,
example:
(0, [error1, error2, ...])
'''
test_name = os.path.basename(test_path)
# Set up the verifier configuration
self.SetImageDefaults(test_name)
self.ClearImageLogs(test_name)
# run the test.
exit_code = subprocess.call(test_path)
# Clear the verifier settings for the image.
self.ResetImage(test_name)
# And process the logs.
errors = self.ProcessLogs(test_name)
return (exit_code, errors)
def DumpImages_(self):
for image in self.manager.Images:
print image.Name
for check in image.Checks:
print '\t', check.Name, check.Enabled
def DumpLogs_(self, image_name):
for l in self.manager.Logs(image_name):
print l
def RunTestWithVerifier(test_path, break_on_stop = False):
'''Runs test_path under app verifier.
Returns: a tuple of the exit code and list of errors
'''
runner = AppverifierTestRunner(break_on_stop)
return runner.RunTestWithVerifier(test_path)
def HasAppVerifier():
'''Returns true iff application verifier is installed.'''
if VerifierLib:
return True
return False
def Main():
'''Runs all tests on command line under verifier with stops disabled.'''
for test in sys.argv[1:]:
(dummy_exit_code, errors) = RunTestWithVerifier(test)
for error in errors:
print error
if __name__ == '__main__':
Main()
| {
"content_hash": "c242747c36ab958f8435e3fda6cc0389",
"timestamp": "",
"source": "github",
"line_count": 324,
"max_line_length": 80,
"avg_line_length": 26.614197530864196,
"alnum_prop": 0.6326104603966137,
"repo_name": "pombreda/syzygy",
"id": "2585cfa7545e5ec134ee3778558431a83dbb4fb0",
"size": "9229",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "syzygy/build/verifier.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "13748"
},
{
"name": "C",
"bytes": "8422"
},
{
"name": "C++",
"bytes": "7587976"
},
{
"name": "CSS",
"bytes": "1333"
},
{
"name": "HTML",
"bytes": "3182"
},
{
"name": "Protocol Buffer",
"bytes": "6472"
},
{
"name": "Python",
"bytes": "841811"
},
{
"name": "Shell",
"bytes": "19040"
}
],
"symlink_target": ""
} |
"""Climate platform for Advantage Air integration."""
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
FAN_AUTO,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
SUPPORT_FAN_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, TEMP_CELSIUS
from homeassistant.helpers import entity_platform
from .const import (
ADVANTAGE_AIR_STATE_CLOSE,
ADVANTAGE_AIR_STATE_OFF,
ADVANTAGE_AIR_STATE_ON,
ADVANTAGE_AIR_STATE_OPEN,
DOMAIN as ADVANTAGE_AIR_DOMAIN,
)
from .entity import AdvantageAirEntity
ADVANTAGE_AIR_HVAC_MODES = {
"heat": HVAC_MODE_HEAT,
"cool": HVAC_MODE_COOL,
"vent": HVAC_MODE_FAN_ONLY,
"dry": HVAC_MODE_DRY,
"myauto": HVAC_MODE_AUTO,
}
HASS_HVAC_MODES = {v: k for k, v in ADVANTAGE_AIR_HVAC_MODES.items()}
AC_HVAC_MODES = [
HVAC_MODE_OFF,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_DRY,
]
ADVANTAGE_AIR_FAN_MODES = {
"auto": FAN_AUTO,
"low": FAN_LOW,
"medium": FAN_MEDIUM,
"high": FAN_HIGH,
}
HASS_FAN_MODES = {v: k for k, v in ADVANTAGE_AIR_FAN_MODES.items()}
FAN_SPEEDS = {FAN_LOW: 30, FAN_MEDIUM: 60, FAN_HIGH: 100}
ADVANTAGE_AIR_SERVICE_SET_MYZONE = "set_myzone"
ZONE_HVAC_MODES = [HVAC_MODE_OFF, HVAC_MODE_FAN_ONLY]
PARALLEL_UPDATES = 0
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up AdvantageAir climate platform."""
instance = hass.data[ADVANTAGE_AIR_DOMAIN][config_entry.entry_id]
entities = []
for ac_key, ac_device in instance["coordinator"].data["aircons"].items():
entities.append(AdvantageAirAC(instance, ac_key))
for zone_key, zone in ac_device["zones"].items():
# Only add zone climate control when zone is in temperature control
if zone["type"] != 0:
entities.append(AdvantageAirZone(instance, ac_key, zone_key))
async_add_entities(entities)
platform = entity_platform.async_get_current_platform()
platform.async_register_entity_service(
ADVANTAGE_AIR_SERVICE_SET_MYZONE,
{},
"set_myzone",
)
class AdvantageAirClimateEntity(AdvantageAirEntity, ClimateEntity):
"""AdvantageAir Climate class."""
_attr_temperature_unit = TEMP_CELSIUS
_attr_target_temperature_step = PRECISION_WHOLE
_attr_max_temp = 32
_attr_min_temp = 16
class AdvantageAirAC(AdvantageAirClimateEntity):
"""AdvantageAir AC unit."""
_attr_fan_modes = [FAN_AUTO, FAN_LOW, FAN_MEDIUM, FAN_HIGH]
_attr_hvac_modes = AC_HVAC_MODES
_attr_supported_features = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE
def __init__(self, instance, ac_key):
"""Initialize an AdvantageAir AC unit."""
super().__init__(instance, ac_key)
self._attr_name = self._ac["name"]
self._attr_unique_id = f'{self.coordinator.data["system"]["rid"]}-{ac_key}'
if self._ac.get("myAutoModeEnabled"):
self._attr_hvac_modes = AC_HVAC_MODES + [HVAC_MODE_AUTO]
@property
def target_temperature(self):
"""Return the current target temperature."""
return self._ac["setTemp"]
@property
def hvac_mode(self):
"""Return the current HVAC modes."""
if self._ac["state"] == ADVANTAGE_AIR_STATE_ON:
return ADVANTAGE_AIR_HVAC_MODES.get(self._ac["mode"])
return HVAC_MODE_OFF
@property
def fan_mode(self):
"""Return the current fan modes."""
return ADVANTAGE_AIR_FAN_MODES.get(self._ac["fan"])
async def async_set_hvac_mode(self, hvac_mode):
"""Set the HVAC Mode and State."""
if hvac_mode == HVAC_MODE_OFF:
await self.async_change(
{self.ac_key: {"info": {"state": ADVANTAGE_AIR_STATE_OFF}}}
)
else:
await self.async_change(
{
self.ac_key: {
"info": {
"state": ADVANTAGE_AIR_STATE_ON,
"mode": HASS_HVAC_MODES.get(hvac_mode),
}
}
}
)
async def async_set_fan_mode(self, fan_mode):
"""Set the Fan Mode."""
await self.async_change(
{self.ac_key: {"info": {"fan": HASS_FAN_MODES.get(fan_mode)}}}
)
async def async_set_temperature(self, **kwargs):
"""Set the Temperature."""
temp = kwargs.get(ATTR_TEMPERATURE)
await self.async_change({self.ac_key: {"info": {"setTemp": temp}}})
class AdvantageAirZone(AdvantageAirClimateEntity):
"""AdvantageAir Zone control."""
_attr_hvac_modes = ZONE_HVAC_MODES
_attr_supported_features = SUPPORT_TARGET_TEMPERATURE
def __init__(self, instance, ac_key, zone_key):
"""Initialize an AdvantageAir Zone control."""
super().__init__(instance, ac_key, zone_key)
self._attr_name = self._zone["name"]
self._attr_unique_id = (
f'{self.coordinator.data["system"]["rid"]}-{ac_key}-{zone_key}'
)
@property
def hvac_mode(self):
"""Return the current state as HVAC mode."""
if self._zone["state"] == ADVANTAGE_AIR_STATE_OPEN:
return HVAC_MODE_FAN_ONLY
return HVAC_MODE_OFF
@property
def current_temperature(self):
"""Return the current temperature."""
return self._zone["measuredTemp"]
@property
def target_temperature(self):
"""Return the target temperature."""
return self._zone["setTemp"]
async def async_set_hvac_mode(self, hvac_mode):
"""Set the HVAC Mode and State."""
if hvac_mode == HVAC_MODE_OFF:
await self.async_change(
{
self.ac_key: {
"zones": {self.zone_key: {"state": ADVANTAGE_AIR_STATE_CLOSE}}
}
}
)
else:
await self.async_change(
{
self.ac_key: {
"zones": {self.zone_key: {"state": ADVANTAGE_AIR_STATE_OPEN}}
}
}
)
async def async_set_temperature(self, **kwargs):
"""Set the Temperature."""
temp = kwargs.get(ATTR_TEMPERATURE)
await self.async_change(
{self.ac_key: {"zones": {self.zone_key: {"setTemp": temp}}}}
)
async def set_myzone(self, **kwargs):
"""Set this zone as the 'MyZone'."""
await self.async_change(
{self.ac_key: {"info": {"myZone": self._zone["number"]}}}
)
| {
"content_hash": "6a41e17999bf11125538756729c15721",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 86,
"avg_line_length": 31.590697674418603,
"alnum_prop": 0.5834805653710248,
"repo_name": "sander76/home-assistant",
"id": "1e6027b8db6117345f0bd5158188c6c9aa2ddb3d",
"size": "6792",
"binary": false,
"copies": "5",
"ref": "refs/heads/dev",
"path": "homeassistant/components/advantage_air/climate.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "36548768"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
} |
print("hello")
print("world")
print("bye.")
| {
"content_hash": "a7c7f0102e3385f42e7ba7a2a68208dc",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 14,
"avg_line_length": 14.666666666666666,
"alnum_prop": 0.6363636363636364,
"repo_name": "go-python/gpython",
"id": "fdbccfc16cc829c60e1db632f4b5b9599f9fff3d",
"size": "209",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "pytest/testdata/hello.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1205"
},
{
"name": "Go",
"bytes": "1479898"
},
{
"name": "HTML",
"bytes": "1653"
},
{
"name": "JavaScript",
"bytes": "13418"
},
{
"name": "Makefile",
"bytes": "227"
},
{
"name": "Python",
"bytes": "301848"
},
{
"name": "Shell",
"bytes": "1276"
},
{
"name": "Yacc",
"bytes": "36569"
}
],
"symlink_target": ""
} |
"""Self-test suite for Cryptodome.Cipher.DES3"""
import unittest
from binascii import hexlify
from Cryptodome.Cipher import DES3
from Cryptodome.Util.strxor import strxor_c
from Cryptodome.Util.py3compat import bchr, unhexlify, tostr
from Cryptodome.SelfTest.loader import load_tests
from Cryptodome.SelfTest.st_common import list_test_cases
# This is a list of (plaintext, ciphertext, key, description) tuples.
test_data = [
# Test vector from Appendix B of NIST SP 800-67
# "Recommendation for the Triple Data Encryption Algorithm (TDEA) Block
# Cipher"
# http://csrc.nist.gov/publications/nistpubs/800-67/SP800-67.pdf
('54686520717566636b2062726f776e20666f78206a756d70',
'a826fd8ce53b855fcce21c8112256fe668d5c05dd9b6b900',
'0123456789abcdef23456789abcdef01456789abcdef0123',
'NIST SP800-67 B.1'),
# This test is designed to test the DES3 API, not the correctness of the
# output.
('21e81b7ade88a259', '5c577d4d9b20c0f8',
'9b397ebf81b1181e282f4bb8adbadc6b', 'Two-key 3DES'),
]
# NIST CAVP test vectors
nist_tdes_mmt_files = ("TECBMMT2.rsp", "TECBMMT3.rsp")
for tdes_file in nist_tdes_mmt_files:
test_vectors = load_tests(("Cryptodome", "SelfTest", "Cipher", "test_vectors", "TDES"),
tdes_file,
"TDES ECB (%s)" % tdes_file,
{ "count" : lambda x: int(x) } )
assert(test_vectors)
for index, tv in enumerate(test_vectors):
# The test vector file contains some directive lines
if isinstance(tv, basestring):
continue
key = tv.key1 + tv.key2 + tv.key3
test_data_item = (tostr(hexlify(tv.plaintext)),
tostr(hexlify(tv.ciphertext)),
tostr(hexlify(key)),
"%s (%s)" % (tdes_file, index))
test_data.append(test_data_item)
class CheckParity(unittest.TestCase):
def test_parity_option2(self):
before_2k = unhexlify("CABF326FA56734324FFCCABCDEFACABF")
after_2k = DES3.adjust_key_parity(before_2k)
self.assertEqual(after_2k,
unhexlify("CBBF326EA46734324FFDCBBCDFFBCBBF"))
def test_parity_option3(self):
before_3k = unhexlify("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCC")
after_3k = DES3.adjust_key_parity(before_3k)
self.assertEqual(after_3k,
unhexlify("ABABABABABABABABBABABABABABABABACDCDCDCDCDCDCDCD"))
def test_degradation(self):
sub_key1 = bchr(1) * 8
sub_key2 = bchr(255) * 8
# K1 == K2
self.assertRaises(ValueError, DES3.adjust_key_parity,
sub_key1 * 2 + sub_key2)
# K2 == K3
self.assertRaises(ValueError, DES3.adjust_key_parity,
sub_key1 + sub_key2 * 2)
# K1 == K2 == K3
self.assertRaises(ValueError, DES3.adjust_key_parity,
sub_key1 * 3)
# K1 == K2 (with different parity)
self.assertRaises(ValueError, DES3.adjust_key_parity,
sub_key1 + strxor_c(sub_key1, 1) + sub_key2)
class DegenerateToDESTest(unittest.TestCase):
def runTest(self):
sub_key1 = bchr(1) * 8
sub_key2 = bchr(255) * 8
# K1 == K2
self.assertRaises(ValueError, DES3.new,
sub_key1 * 2 + sub_key2,
DES3.MODE_ECB)
# K2 == K3
self.assertRaises(ValueError, DES3.new,
sub_key1 + sub_key2 * 2,
DES3.MODE_ECB)
# K1 == K2 == K3
self.assertRaises(ValueError, DES3.new,
sub_key1 *3,
DES3.MODE_ECB)
# K2 == K3 (parity is ignored)
self.assertRaises(ValueError, DES3.new,
sub_key1 + sub_key2 + strxor_c(sub_key2, 0x1),
DES3.MODE_ECB)
def get_tests(config={}):
from common import make_block_tests
tests = []
tests = make_block_tests(DES3, "DES3", test_data)
tests.append(DegenerateToDESTest())
tests += list_test_cases(CheckParity)
return tests
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| {
"content_hash": "a84d33f1b4d28705bd3fe12fbfa5a9cf",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 91,
"avg_line_length": 33.76335877862596,
"alnum_prop": 0.5880624010852362,
"repo_name": "chronicwaffle/PokemonGo-DesktopMap",
"id": "4049e67e25ffc717d02f9998cd90d1dbb4e52002",
"size": "5540",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/pylibs/osx64/Cryptodome/SelfTest/Cipher/test_DES3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "29260"
},
{
"name": "JavaScript",
"bytes": "52980"
},
{
"name": "Python",
"bytes": "11998498"
},
{
"name": "Shell",
"bytes": "4097"
}
],
"symlink_target": ""
} |
import Axon
import time
import Queue
class ThreadWrap(Axon.ThreadedComponent.threadedcomponent):
Inboxes = {
"inbox":"From the outside world",
"control":"From the outside world",
"_inbox":"From the component to go to the outside world",
"_control":"From the component to go to the outside world",
}
Outboxes = {
"outbox":"To the outside world",
"signal":"To the outside world",
"_outbox":"From the outside world to go to the component",
"_signal":"From the outside world to go to the component",
}
def __init__(self, someComponent):
"""x.__init__(...) initializes x; see x.__class__.__doc__ for signature"""
super(ThreadWrap,self).__init__()
self.comp = someComponent
self.inboundData = Queue.Queue()
self.outboundData = Queue.Queue()
def put(self, *args):
self.inboundData.put(*args)
def get(self):
return self.outboundData.get_nowait()
def main(self):
"""Main loop."""
self.addChildren(self.comp)
self.link((self,"_outbox"), (self.comp,"inbox"))
self.link((self,"_signal"), (self.comp,"control"))
self.link((self.comp,"outbox"), (self,"_inbox"))
self.link((self.comp,"signal"), (self,"_control"))
for child in self.children:
child.activate()
# run until all child components have terminated
# at which point this component can implode
        # because they are children, if they terminate, we'll be woken up
while not self.childrenDone():
# We manually forward the data here. There are probably nicer methods, but for the
            # moment, let's stick to brute force/clarity
time.sleep(0.01) # so that we're not totally spinning
while self.dataReady("inbox"):
self.send(self.recv("inbox"), "_outbox")
while self.dataReady("control"):
self.send(self.recv("control"), "_signal")
while self.dataReady("_inbox"):
self.outboundData.put( (self.recv("_inbox"), "outbox") )
while self.dataReady("_control"):
self.send(self.recv("_control"), "signal")
def childrenDone(self):
"""Unplugs any children that have terminated, and returns true if there are no
           running child components left (i.e. their microprocesses have finished)
"""
for child in self.childComponents():
if child._isStopped():
self.removeChild(child) # deregisters linkages for us
return 0==len(self.childComponents())
if __name__=="__main__":
from Kamaelia.Util.Console import ConsoleReader, ConsoleEchoer
from Kamaelia.Chassis.Pipeline import Pipeline
import time
class Waiter(Axon.Component.component):
def main(self):
print "RUNNING"
t = time.time()
            while time.time()-t < 2:   # busy-yield for roughly 2 seconds before finishing
yield 1
print "DONE"
# All the following run as you would expect at this stage
if 0:
Waiter().run()
if 0:
ThreadWrap( Waiter() ).run()
if 1:
Pipeline(
ThreadWrap(ConsoleReader()),
ConsoleEchoer(),
).run()
| {
"content_hash": "39852bda685b81798dad17085c9b8518",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 92,
"avg_line_length": 32.28,
"alnum_prop": 0.5926270136307311,
"repo_name": "bbc/kamaelia",
"id": "af378333caf437c681d078b67c053af52da829d4",
"size": "4288",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Sketches/MPS/Experiments/Likefile2/likefile/deprecated/ThreadWrap.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "62985"
},
{
"name": "C",
"bytes": "212854"
},
{
"name": "C++",
"bytes": "327546"
},
{
"name": "CSS",
"bytes": "114434"
},
{
"name": "ChucK",
"bytes": "422"
},
{
"name": "Diff",
"bytes": "483"
},
{
"name": "Gettext Catalog",
"bytes": "3919909"
},
{
"name": "HTML",
"bytes": "1288960"
},
{
"name": "Java",
"bytes": "31832"
},
{
"name": "JavaScript",
"bytes": "829491"
},
{
"name": "Makefile",
"bytes": "5768"
},
{
"name": "NSIS",
"bytes": "18867"
},
{
"name": "PHP",
"bytes": "49059"
},
{
"name": "Perl",
"bytes": "31234"
},
{
"name": "Processing",
"bytes": "2885"
},
{
"name": "Pure Data",
"bytes": "7485482"
},
{
"name": "Python",
"bytes": "18896320"
},
{
"name": "Ruby",
"bytes": "4165"
},
{
"name": "Shell",
"bytes": "711244"
}
],
"symlink_target": ""
} |
"""Custom news feeds for the multilingual project blog."""
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
from multilingual_news.feeds import NewsEntriesFeed
class CustomNewsEntriesFeed(NewsEntriesFeed):
"""
Customized to get the universal content link, that is independent of the
currently active language.
Chooses the correct fallback language in case the active one does not
exist.
"""
def item_link(self, item):
language = None
try:
language = item.language_code
slug = item.slug
except ObjectDoesNotExist:
pass
else:
return reverse('news_detail', kwargs={'slug': slug})
if language is None:
try:
trans = item.translations.get(language_code='en')
except ObjectDoesNotExist:
pass
else:
language = trans.language_code
slug = trans.slug
if language is None:
language = item.translations.all()[0].language_code
slug = item.translations.all()[0].slug
return '{0}?lang={1}'.format(reverse('news_detail', kwargs={
'slug': slug}), language)
| {
"content_hash": "81786fcb1f1b24e3aed823e0ca459e10",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 76,
"avg_line_length": 33.21052631578947,
"alnum_prop": 0.6109350237717908,
"repo_name": "bigee/django-multilingual-project-blog",
"id": "6665600e383d206556f91b9ad4c798a0b073f472",
"size": "1262",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "multilingual_project_blog/feeds.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "26606"
},
{
"name": "JavaScript",
"bytes": "114903"
},
{
"name": "Python",
"bytes": "41614"
}
],
"symlink_target": ""
} |
import numpy as np
import re
from collections import defaultdict
from contextlib import closing
import MySQLdb
from webservices.ndwserror import NDWSError
import logging
logger = logging.getLogger("neurodata")
"""
.. module:: ramonddb
:synopsis: Manipulate/create/read annotations in the ramon format
.. moduleauthor:: Kunal Lillaney <lillaney@jhu.edu>
"""
class MySQLRamonDB:
def __init__ (self, proj):
"""Connect with the brain databases"""
self.proj = proj
# Connection info for the metadata
try:
self.conn = MySQLdb.connect (host = self.proj.host, user = self.proj.kvengine_user, passwd = self.proj.kvengine_password, db = self.proj.dbname)
self.cursor = self.conn.cursor()
except MySQLdb.Error, e:
self.conn = None
logger.error("Failed to connect to database: {}, {}".format(self.proj.host, self.proj.dbname))
def close ( self ):
"""Close the connection"""
if self.cursor:
self.cursor.close()
if self.conn:
self.conn.close()
def commit ( self ):
"""Commit the transaction. Moved out of __del__ to make explicit."""
if self.cursor is not None:
self.cursor.close()
self.conn.commit()
def startTxn ( self ):
"""Start a transaction. Ensure database is in multi-statement mode."""
self.cursor = self.conn.cursor()
sql = "START TRANSACTION"
self.cursor.execute ( sql )
def rollback ( self ):
"""Rollback the transaction. To be called on exceptions."""
self.cursor.close()
self.conn.rollback()
def nextID ( self, ch ):
"""Get an new identifier. This is it's own txn and should not be called inside another transaction."""
with closing(self.conn.cursor()) as cursor:
# LOCK the table to prevent race conditions on the ID
sql = "LOCK TABLES {} WRITE".format(ch.getIdsTable())
try:
cursor.execute ( sql )
# Query the current max identifier
sql = "SELECT max(id) FROM {}".format(ch.getIdsTable())
try:
cursor.execute ( sql )
except MySQLdb.Error, e:
logger.error ( "Failed to create annotation identifier {}: {}. sql={}".format(e.args[0], e.args[1], sql))
raise
# Here we've queried the highest id successfully
row = cursor.fetchone ()
        # if the table is empty, start at 1 (id 0 is reserved to mean "no annotation")
if ( row[0] == None ):
identifier = 1
else:
identifier = int ( row[0] ) + 1
# increment and update query
sql = "INSERT INTO {} VALUES ({})".format(ch.getIdsTable(), identifier)
try:
cursor.execute ( sql )
except MySQLdb.Error, e:
logger.error ( "Failed to insert into identifier table: {}: {}. sql={}".format(e.args[0], e.args[1], sql))
raise
finally:
sql = "UNLOCK TABLES"
cursor.execute ( sql )
self.conn.commit()
return identifier
def setID ( self, ch, annoid ):
"""Set a user specified identifier in the ids table"""
with closing(self.conn.cursor()) as cursor:
# LOCK the table to prevent race conditions on the ID
sql = "LOCK TABLES {} WRITE".format( ch.getIdsTable() )
try:
        # try the insert, get an exception if it doesn't work
sql = "INSERT INTO {} VALUES({})".format(ch.getIdsTable(), annoid)
try:
cursor.execute ( sql )
except MySQLdb.Error, e:
logger.warning ( "Failed to set identifier table: {}: {}. sql={}".format(e.args[0], e.args[1], sql))
raise
finally:
sql = "UNLOCK TABLES"
cursor.execute ( sql )
self.conn.commit()
return annoid
#
# setBatchID
#
# Place the user selected id into the ids table
#
def setBatchID ( self, annoidList ):
""" Set a user specified identifier """
with closing(self.conn.cursor()) as cursor:
# LOCK the table to prevent race conditions on the ID
sql = "LOCK TABLES {} WRITE".format(self.proj.getIdsTable())
try:
        # try the insert, get an exception if it doesn't work
sql = "INSERT INTO {} VALUES ( %s ) ".format( str(self.proj.getIdsTable()) )
try:
cursor.executemany ( sql, [str(i) for i in annoidList] )
except MySQLdb.Error, e:
logger.warning ( "Failed to set identifier table: {}: {}. sql={}".format(e.args[0], e.args[1], sql))
raise
finally:
sql = "UNLOCK TABLES"
cursor.execute ( sql )
self.conn.commit()
return annoidList
def reserve ( self, ch, count ):
"""Reserve contiguous identifiers. This is it's own txn and should not be called inside another transaction."""
with closing(self.conn.cursor()) as cursor:
# LOCK the table to prevent race conditions on the ID
sql = "LOCK TABLES {} WRITE".format( ch.getIdsTable() )
try:
cursor.execute ( sql )
# Query the current max identifier
sql = "SELECT max(id) FROM {}".format( ch.getIdsTable() )
try:
cursor.execute ( sql )
except MySQLdb.Error, e:
logger.error ( "Failed to create annotation identifier {}: {}. sql={}".format(e.args[0], e.args[1], sql))
raise
# Here we've queried the highest id successfully
row = cursor.fetchone ()
        # if the table is empty, reserved ids start at 1 (id 0 is reserved to mean "no annotation")
if ( row[0] == None ):
identifier = 0
else:
identifier = int ( row[0] )
# increment and update query
sql = "INSERT INTO {} VALUES ({}) ".format(ch.getIdsTable(), identifier+count)
try:
cursor.execute ( sql )
except MySQLdb.Error, e:
logger.error ( "Failed to insert into identifier table: {}: {}. sql={}".format(e.args[0], e.args[1], sql))
raise
except Exception, e:
logger.error ( "Failed to insert into identifier table: {}: {}. sql={}".format(e.args[0], e.args[1], sql))
finally:
sql = "UNLOCK TABLES"
cursor.execute ( sql )
self.conn.commit()
return identifier+1
def getAnnotationKV ( self, ch, annid ):
sql = "SELECT kv_key, kv_value FROM {}_ramon WHERE annoid='{}'".format(ch.channel_name,annid)
try:
self.cursor.execute ( sql )
pairs = self.cursor.fetchall()
except MySQLdb.Error, e:
logger.error ( "Failed to fetch annotation: {}: {}. sql={}".format(e.args[0], e.args[1], sql))
raise
if len(pairs) == 0:
logger.error( "Failed to fetch annotation: {}: No annotation object found. sql={}".format( annid, sql ) )
raise NDWSError( "Failed to fetch annotation: {}: No annotation object found. sql={}".format( annid, sql ) )
# convert answer into a dictionary
kvdict = defaultdict(list)
for (k,v) in pairs:
# detect multiple key values
if kvdict[k]:
if type(kvdict[k]) == list:
kvdict[k].append(v)
else:
kvdict[k] = [kvdict[k],v]
else:
kvdict[k]=v
return kvdict
def putAnnotationKV ( self, ch, annid, kvdict, update=False ):
"""store an HDF5 annotation to the database"""
if update:
self.deleteAnnotation ( ch, annid )
sql = "INSERT INTO {}_ramon (annoid, kv_key, kv_value) VALUES (%s,%s,%s)".format(ch.channel_name)
data = []
for (k,v) in kvdict.iteritems():
# blowout lists to multiple values
if type(v) in [list,tuple]:
[ data.append((annid,k,vv)) for vv in v ]
else:
data.append((annid,k,v))
try:
self.cursor.executemany ( sql, data )
except MySQLdb.Error, e:
logger.error ( "Failed to put annotation: {}: {}. sql={}".format(e.args[0], e.args[1], sql))
raise
def deleteAnnotation ( self, ch, annoid ):
"""delete an HDF5 annotation from the database"""
sql = "DELETE FROM {}_ramon WHERE annoid={}".format(ch.channel_name,annoid)
try:
self.cursor.execute ( sql )
except MySQLdb.Error, e:
logger.error ( "Failed to delete annotation: {}: {}. sql={}".format(e.args[0], e.args[1], sql))
raise
# getKVQuery
# Return a list of annotation object IDs that match a specific key/value string
def getKVQuery ( self, ch, qkey, qvalue ):
"""Return a list of annotation object ids that match equality predicates on key value."""
sql = "SELECT annoid FROM {}_ramon WHERE kv_key = '{}' AND kv_value = '{}'".format(ch.channel_name, qkey, qvalue)
try:
self.cursor.execute ( sql )
annoids = np.array ( self.cursor.fetchall(), dtype=np.uint32 ).flatten()
except MySQLdb.Error, e:
logger.error ( "Error retrieving ids: %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
raise
return np.array(annoids)
def getTopKeys ( self, ch, count, anntype ):
"""Return the count top keys in the database."""
if anntype == None:
sql = "SELECT kv_key FROM {}_ramon GROUP BY kv_key ORDER BY COUNT(kv_key) LIMIT {}".format(ch.channel_name, count)
else:
sql = "SELECT kv_key FROM {}_ramon WHERE annoid in (select annoid from anno_ramon where kv_key = 'ann_type' and kv_value = {}) GROUP BY kv_key ORDER BY COUNT(kv_key) LIMIT {}".format(ch.channel_name, anntype, count)
try:
self.cursor.execute ( sql )
topkeys = list(self.cursor.fetchall())
except MySQLdb.Error, e:
logger.error ( "Error retrieving ids: %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
raise
return topkeys
# getAnnoObjects:
# Return a list of annotation object IDs
# for now by type and status
def getAnnoObjects ( self, ch, args ):
"""Return a list of annotation object ids that match equality predicates.
Legal predicates are currently:
type
status
    Predicates are given as a flat sequence of field/value tokens; comparative fields take (field, operator, value) and 'limit' takes an integer count.
"""
# legal equality fields
eqfields = ( 'type', 'status' )
# legal comparative fields
    compfields = ( 'confidence', )  # trailing comma so this is a one-element tuple, not a string
# dictionary to look up key name. this should be rewritten to
# be replaced in a higher level module. the fields are defined
# in annotation.py
key_name = { 'type' : 'ann_type',\
'status' : 'ann_status',\
'confidence' : 'ann_confidence' }
# start of the SQL clause
sql = "SELECT annoid FROM {}_ramon".format(ch.channel_name)
clause = ''
limitclause = ""
# iterate over the predicates
it = iter(args)
try:
field = it.next()
# build a query for all the predicates
while ( field ):
# provide a limit clause for iterating through the database
if field == "limit":
val = it.next()
if not re.match('^\d+$',val):
logger.warning ( "Limit needs an integer. Illegal value:%s" % (field,val) )
raise OCPCAError ( "Limit needs an integer. Illegal value:%s" % (field,val) )
limitclause = " LIMIT %s " % (val)
# all other clauses
else:
if clause == '':
clause += " WHERE "
else:
clause += ' AND '
if field in compfields:
opstr = it.next()
if opstr == 'lt':
op = ' < '
elif opstr == 'gt':
op = ' > '
else:
logger.warning ( "Not a comparison operator: %s" % (opstr) )
raise OCPCAError ( "Not a comparison operator: %s" % (opstr) )
val = it.next()
if not re.match('^[\d\.]+$',val):
logger.warning ( "For field %s. Illegal value:%s" % (field,val) )
raise OCPCAError ( "For field %s. Illegal value:%s" % (field,val) )
clause += "kv_key = '%s' AND kv_value %s %s" % ( key_name[field], op, val )
# all other fields have equality predicates
# rewrite those in interface
elif field in eqfields:
val = it.next()
clause += "kv_key = '%s' AND kv_value = '%s'" % ( key_name[field], val )
# all others are kv equality
else:
val = it.next()
clause += "kv_key = '%s' AND kv_value = '%s'" % ( field, val )
field = it.next()
except StopIteration:
pass
sql += clause + limitclause + ';'
try:
self.cursor.execute ( sql )
annoids = np.array ( self.cursor.fetchall(), dtype=np.uint32 ).flatten()
except MySQLdb.Error, e:
logger.error ( "Error retrieving ids: %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
raise
return np.array(annoids)
def querySegments ( self, ch, annid ):
"""Return segments that belong to this neuron"""
sql = "SELECT annoid FROM {}_ramon WHERE kv_key='{}' AND kv_value={}".format(ch.channel_name, 'seg_neuron', annid)
try:
self.cursor.execute ( sql )
except MySQLdb.Error, e:
logger.warning ( "Error querying neuron segments %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
raise NDWSError ( "Error querying neuron segments %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
return np.array(self.cursor.fetchall(), dtype=np.uint32).flatten()
def queryROIChildren ( self, ch, annid ):
"""Return children that belong to this ROI"""
sql = "SELECT annoid FROM {}_ramon WHERE kv_key='{}' AND kv_value={}".format(ch.channel_name, 'roi_parent', annid)
try:
self.cursor.execute ( sql )
except MySQLdb.Error, e:
logger.warning ( "Error querying children %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
raise NDWSError ( "Error querying children %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
return np.array(self.cursor.fetchall(), dtype=np.uint32).flatten()
def queryNodeChildren ( self, ch, annid ):
"""Return children that belong to this ROI"""
sql = "SELECT annoid FROM {}_ramon WHERE kv_key='{}' AND kv_value={}".format(ch.channel_name, 'roi_parent', annid)
try:
self.cursor.execute ( sql )
except MySQLdb.Error, e:
logger.warning ( "Error querying children %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
raise NDWSError ( "Error querying children %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
return np.array(self.cursor.fetchall(), dtype=np.uint32).flatten()
def querySkeletonNodes ( self, ch, annid ):
"""Return the nodes that belong to this skeleton"""
# get the root node of the skeleton
sql = "SELECT annoid FROM {}_ramon WHERE kv_key='node_skeleton' and kv_value={}".format(ch.channel_name, annid)
try:
self.cursor.execute ( sql )
except MySQLdb.Error, e:
logger.warning ( "Error querying skeleton nodes %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
raise NDWSError ( "Error querying skeleton nodes %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
return np.array(self.cursor.fetchall(), dtype=np.uint32).flatten()
def querySynapses ( self, ch, annid ):
"""Return synapses that belong to this segment"""
sql = "SELECT annoid FROM {}_ramon WHERE kv_key='{}' AND kv_value={}".format(ch.channel_name, 'syn_segments', annid)
try:
self.cursor.execute ( sql )
except MySQLdb.Error, e:
logger.warning ( "Error querying synapses %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
raise NDWSError ( "Error querying synapses %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
return np.array(self.cursor.fetchall(), dtype=np.uint32).flatten()
def queryPreSynapses ( self, ch, annid ):
"""Return presynaptic synapses that belong to this segment"""
sql = "SELECT annoid FROM {}_ramon WHERE kv_key='{}' AND kv_value={}".format(ch.channel_name, 'syn_presegments', annid)
try:
self.cursor.execute ( sql )
except MySQLdb.Error, e:
logger.warning ( "Error querying synapses %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
raise NDWSError ( "Error querying synapses %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
return np.array(self.cursor.fetchall(), dtype=np.uint32).flatten()
def queryPostSynapses ( self, ch, annid ):
"""Return postsynaptic synapses that belong to this segment"""
sql = "SELECT annoid FROM {}_ramon WHERE kv_key='{}' AND kv_value={}".format(ch.channel_name, 'syn_postsegments', annid)
try:
self.cursor.execute ( sql )
except MySQLdb.Error, e:
logger.warning ( "Error querying synapses %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
raise NDWSError ( "Error querying synapses %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
return np.array(self.cursor.fetchall(), dtype=np.uint32).flatten()
def queryOrganelles ( self, ch, annid ):
"""Return organelles that belong to this segment"""
sql = "SELECT annoid FROM {}_ramon WHERE kv_key='{}' AND kv_value={}".format(ch.channel_name, 'org_segment', annid)
try:
self.cursor.execute ( sql )
except MySQLdb.Error, e:
logger.warning ( "Error querying synapses %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
raise NDWSError ( "Error querying synapses %d: %s. sql=%s" % (e.args[0], e.args[1], sql))
return np.array(self.cursor.fetchall(), dtype=np.uint32).flatten()
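# Illustrative usage sketch; nothing in this module calls it. It shows the
# transaction pattern the docstrings above describe: take an identifier with
# nextID() outside any open transaction, then write the RAMON key/value pairs
# inside an explicit startTxn()/commit() pair, rolling back on failure. The
# proj/ch arguments stand for the same project and channel objects the class
# methods already expect, and kvdict is any {key: value} annotation mapping.
def exampleStoreAnnotation ( proj, ch, kvdict ):
  """Sketch: store one annotation with explicit transaction handling."""
  rdb = MySQLRamonDB(proj)
  try:
    annoid = rdb.nextID(ch)
    rdb.startTxn()
    try:
      rdb.putAnnotationKV(ch, annoid, kvdict)
      rdb.commit()
    except Exception:
      rdb.rollback()
      raise
  finally:
    rdb.close()
  # objects stored this way can later be looked up with a flat predicate
  # sequence, e.g. getAnnoObjects(ch, ['type', '2', 'limit', '100'])
  return annoid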
| {
"content_hash": "bcb10da95d378a0159fad6e8af7420d6",
"timestamp": "",
"source": "github",
"line_count": 504,
"max_line_length": 221,
"avg_line_length": 33.8531746031746,
"alnum_prop": 0.5951236666275935,
"repo_name": "neurodata/ndstore",
"id": "ac6cde46546080e026ef5f1ab708d265b4dec3d4",
"size": "17661",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ndramon/mysqlramondb.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "43094"
},
{
"name": "HTML",
"bytes": "83782"
},
{
"name": "JavaScript",
"bytes": "75900"
},
{
"name": "Nginx",
"bytes": "1743"
},
{
"name": "Python",
"bytes": "1491127"
},
{
"name": "Shell",
"bytes": "14105"
}
],
"symlink_target": ""
} |
"""
flasticket
~~~~~~~~~~~~~
comment: Simple Ticket Reservation System
:copyright: (c) 2014 by liks. ( Jou Sung Shik, liks79 __at__ gmail.com )
:license: MIT LICENSE 2.0 (http://opensource.org/licenses/MIT).
"""
from flask import Flask, render_template, request, session, Response, redirect, url_for
import datetime
from User import User, db
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.secret_key = 'flasticket_secret'
db.create_all()
@app.route('/')
def index():
return render_template('index.html')
@app.route('/list')
def list():
users = User.query.all()
return render_template('list.html', users=users)
@app.route('/add', methods=['POST'])
def add():
rdate = request.form['rdate']
order = request.form['order']
person = request.form['person']
name = request.form['name']
email = request.form['email']
tel = request.form['tel']
user = User(rdate, order, person, name, email, tel)
print rdate, order, person, name, email, tel
db.session.add(user)
db.session.commit()
return redirect('/list')
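# Illustrative sketch (never called by the app): exercising /add with Flask's
# built-in test client, using the same six form fields the handler above reads.
# The field values here are made-up placeholders.
def _example_add_reservation():
    client = app.test_client()
    response = client.post('/add', data={
        'rdate': '2014-06-01',
        'order': '1',
        'person': '2',
        'name': 'Hong Gildong',
        'email': 'hong@example.com',
        'tel': '010-0000-0000',
    })
    # add() redirects to /list on success
    assert response.status_code == 302
    return response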
@app.errorhandler(404)
def page_not_found(e):
return redirect('/')
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=True)
| {
"content_hash": "019ad420ed0f8832d6e288ef769bb248",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 87,
"avg_line_length": 21.964912280701753,
"alnum_prop": 0.639776357827476,
"repo_name": "liks79/flasticket",
"id": "ef720e71e5c6572e603bff5f440d07e6b2df71ec",
"size": "1276",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flasticket.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "564"
}
],
"symlink_target": ""
} |
"""
Demo microbes service and methods
"""
__author__ = 'Dan Gunter <dkgunter@lbl.gov>, Bill Riehl <wjriehl@lbl.gov>, Michael Sneddon <mwsneddon@lbl.gov>, Roman Sutormin <rsutormin@lbl.gov>'
__date__ = '11/15/13'
## Imports
# Stdlib
import json
import os
import random
import numbers
import uuid
import hashlib
import re
import sys
# Local
import biokbase.narrative.common.service as service
from biokbase.narrative.common.service import init_service, method, finalize_service
from biokbase.workspace.client import Workspace as workspaceService
from biokbase.InvocationService.Client import InvocationService
from biokbase.fbaModelServices.Client import fbaModelServices
from biokbase.GenomeComparison.Client import GenomeComparison
from biokbase.assembly.client import Client as ArastClient
from biokbase.KBaseTrees.Client import KBaseTrees
## Globals
VERSION = (0, 0, 1)
NAME = "Microbes Metabolic Modeling"
# Initialize
init_service(name=NAME, desc="Demo workflow microbes service", version=VERSION)
@method(name="Build a Metabolic Model")
def _genome_to_fba_model(meth, genome_id, fba_model_id):
"""Given an annotated Genome, build a draft metabolic model which can be analyzed with FBA. [6]
:param genome_id: Source genome name [6.1]
:type genome_id: kbtypes.KBaseGenomes.Genome
:ui_name genome_id: Genome Name
:param fba_model_id: select a name for the generated metabolic model (optional) [6.2]
:type fba_model_id: kbtypes.KBaseFBA.FBAModel
:ui_name fba_model_id: Output Metabolic Model Name
:return: Generated Metabolic Model ID
:rtype: kbtypes.KBaseFBA.FBAModel
:output_widget: kbaseModelTabs
"""
"""
Old output widget that was used:
:output_widget: kbaseModelMetaNarrative
Options that we should expose at some point:
:param fba_model_template_id: specify a custom template for building the model (optional) [6.3]
:type fba_model_template_id: kbtypes.Unicode
:ui_name fba_model_template_id: FBA Model Template
:param prob_annot: set to 1 to indicate that probabilistic annotations should be used (optional) [6.4]
:type prob_annot: kbtypes.Unicode
    :ui_name prob_annot: Use Probabilistic Annotations?
:param core_model: set to 1 to indicate that a core metabolic model should be constructed instead of a full genome scale model (optional) [6.5]
:type core_model: kbtypes.Unicode
:ui_name core_model: Core Model Only?
"""
meth.stages = 2 # for reporting progress
meth.advance("Starting")
meth.advance("Building your new FBA model")
#grab token and workspace info, setup the client
userToken, workspaceName = meth.token, meth.workspace_id
fbaClient = fbaModelServices(service.URLS.fba,token=userToken)
# create the model object
build_fba_params = {
'genome': genome_id,
'workspace': workspaceName
}
if fba_model_id:
fba_model_id = fba_model_id.strip()
build_fba_params['model']=fba_model_id
#if core_model:
# build_fba_params['coremodel']=1
#if prob_annot:
# build_fba_params['probannoOnly']=1
# other options that are not exposed
#selecting a model template
fba_meta_data = fbaClient.genome_to_fbamodel(build_fba_params)
model_wsobj_id = fba_meta_data[0]
model_name = fba_meta_data[1]
# fetch the model via fba client
#get_models_params = {
# 'models' : [model_name],
# 'workspaces' : [workspaceName]
#}
#modeldata = fbaClient.get_models(get_models_params)
#meth.advance("Displaying your new FBA model details")
return json.dumps({'id': model_name, 'ws': workspaceName})
@method(name="Translate Model to New Genome")
def _translate_model_to_new_genome(meth, fba_model_id, proteome_cmp, remove_nogene, output_id):
""" Functionality to assign a new genome to an imported model.
    A proteome comparison is done between the original model genome
    and the new desired genome. Metabolic reactions from the original model
    get mapped to genes in the new genome. [19]
:param fba_model_id: an FBA model id from first genome [19.1]
:type fba_model_id: kbtypes.KBaseFBA.FBAModel
:ui_name fba_model_id: FBA Model ID
:param proteome_cmp: Proteome comparison ID [19.3]
:type proteome_cmp: kbtypes.GenomeComparison.ProteomeComparison
:ui_name proteome_cmp: Proteome Comparison ID
:param remove_nogene: specify "yes" if reactions with no genes should be removed
:type remove_nogene: kbtypes.Unicode
:ui_name remove_nogene: Remove No-gene Reactions
:param output_id: ID to which translated model should be saved
:type output_id: kbtypes.KBaseFBA.FBAModel
:ui_name output_id: Translated Model ID
:return: Output Translated Model
:rtype: kbtypes.KBaseFBA.FBAModel
:output_widget: kbaseModelTabs
"""
meth.stages = 2 # for reporting progress
meth.advance("Translating model to new genome...")
keep = 1;
if remove_nogene == 'yes':
keep = 0;
token = os.environ['KB_AUTH_TOKEN']
workspace = os.environ['KB_WORKSPACE_ID']
fbaClient = fbaModelServices(url = service.URLS.fba, token = token)
translate_params = {
'protcomp' : proteome_cmp,
'model' : fba_model_id,
'workspace' : workspace,
'keep_nogene_rxn': keep,
'output_id' : output_id}
modeldata = fbaClient.translate_fbamodel(translate_params)
return json.dumps({'ws': workspace, 'id': output_id})
@method(name="View Phenotype Set")
def view_phenotype(meth, phenotype_set_id):
"""Bring up a detailed view of your phenotype set within the narrative.
:param phenotype_set_id: the phenotype set to view
:type phenotype_set_id: kbtypes.KBasePhenotypes.PhenotypeSet
:ui_name phenotype_set_id: Phenotype Set
:return: Phenotype Set Data
:rtype: kbtypes.KBasePhenotypes.PhenotypeSet
:output_widget: kbasePhenotypeSet
"""
meth.stages = 2 # for reporting progress
meth.advance("Starting...")
#grab token and workspace info, setup the client
userToken, workspaceName = meth.token, meth.workspace_id;
meth.advance("Loading the phenotype set")
return json.dumps({'ws': meth.workspace_id, 'name': phenotype_set_id})
@method(name="Simulate growth on a Phenotype Set")
def _simulate_phenotype(meth, model, phenotypeSet, phenotypeSimulationSet):
"""Simulate the growth of a model on a phenotype set.
:param model: FBA model
:type model: kbtypes.KBaseFBA.FBAModel
:ui_name model: FBA Model
:param phenotypeSet: Phenotype Set
:type phenotypeSet: kbtypes.KBasePhenotypes.PhenotypeSet
:ui_name phenotypeSet: Phenotype Set
:param phenotypeSimulationSet: Name for result of phenotype simulation (optional)
:type phenotypeSimulationSet: kbtypes.KBasePhenotypes.PhenotypeSimulationSet
:ui_name phenotypeSimulationSet: Phenotype Simulation Result
:return: Generated Phenotype Simulation Set ID
:rtype: kbtypes.KBasePhenotypes.PhenotypeSimulationSet
:output_widget: kbaseSimulationSet
"""
meth.stages = 2 # for reporting progress
meth.advance("Starting")
meth.advance("Simulating Phenotypes")
#grab token and workspace info, setup the client
userToken, workspaceName = meth.token, meth.workspace_id
fbaClient = fbaModelServices(service.URLS.fba,token=userToken)
# create the model object
simulate_phenotypes_params = {
'workspace': workspaceName,
'phenotypeSimulationSet': phenotypeSimulationSet,
'model_workspace': workspaceName,
'model': model,
'phenotypeSet_workspace': workspaceName,
'phenotypeSet': phenotypeSet,
}
fba_meta_data = fbaClient.simulate_phenotypes(simulate_phenotypes_params)
wsobj_id = fba_meta_data[0]
name = fba_meta_data[1]
return json.dumps({'name': name, 'ws': workspaceName})
@method(name="View Phenotype Simulation Results")
def view_phenotype_simulation(meth, phenotype_set_id):
"""Bring up a detailed view of your Phenotype Simulation results within the narrative.
:param phenotype_set_id: the phenotype results to view
:type phenotype_set_id: kbtypes.KBasePhenotypes.PhenotypeSimulationSet
:ui_name phenotype_set_id: Phenotype Simulation Set
:return: Phenotype Simulation Set Data
:rtype: kbtypes.KBasePhenotypes.PhenotypeSimulationSet
:output_widget: kbaseSimulationSet
"""
meth.stages = 2 # for reporting progress
meth.advance("Starting...")
#grab token and workspace info, setup the client
userToken, workspaceName = meth.token, meth.workspace_id;
meth.advance("Loading the phenotype simulation results")
ws = workspaceService(service.URLS.workspace, token=userToken)
return json.dumps({'ws': meth.workspace_id, 'name' : phenotype_set_id})
@method(name="Compare Models")
def _compare_models(meth, model_ids):
"""Compare two or models and compute core, noncore unique reactions, functional roles with their subsystem information.
:param model_ids: list of model ids (comma seperated)
:type model_ids: kbtypes.KBaseFBA.FBAModel
:ui_name model_ids: Model IDs
:return: Uploaded Model Comparison Data
:rtype: kbtypes.Unicode
:output_widget: compmodels
"""
mids = model_ids.split(',')
meth.stages = len(mids)+1 # for reporting progress
meth.advance("Starting...")
#grab token and workspace info, setup the client
token, ws = meth.token, meth.workspace_id;
wss =[]
fba = fbaModelServices(url = service.URLS.fba, token = token)
for mid in mids:
meth.advance("Loading models: "+mid);
wss.append(ws)
modelout =fba.compare_models({'models': mids,
'workspaces': wss,
'workspace': ws})
comparemod = modelout['model_comparisons']
reactioncomp = modelout['reaction_comparisons']
#print meth.debug(json.dumps(comparemod))
#print meth.debug(json.dumps(reactioncomp))
return json.dumps({'data': comparemod})
@method(name="View Metabolic Model Details")
def _view_model_details(meth, fba_model_id):
"""Bring up a detailed view of your metabolic model within the narrative. [7]
:param fba_model_id: the metabolic model to view [7.1]
:type fba_model_id: kbtypes.KBaseFBA.FBAModel
:ui_name fba_model_id: Metabolic Model
:return: Metabolic Model Data
:rtype: kbtypes.Model
:output_widget: kbaseModelTabs
"""
meth.stages = 2 # for reporting progress
meth.advance("Starting...")
#grab token and workspace info, setup the client
userToken, workspaceName = meth.token, meth.workspace_id;
meth.advance("Loading the model")
# fetch via fba client (NOW HANDLED IN JS WIDGET)
#fbaClient = fbaModelServices(service.URLS.fba, token=userToken)
#get_models_params = {
# 'models' : [fba_model_id],
# 'workspaces' : [workspaceName]
#}
#modeldata = fbaClient.get_models(get_models_params)
return json.dumps({'id': fba_model_id, 'ws': workspaceName})
@method(name="Delete Reaction")
def _delete_reaction(meth, fba_model_id, reaction_id, output_id):
"""Delete reactions from selected Metabolic Model
:param fba_model_id: the metabolic model to edit
:type fba_model_id: kbtypes.KBaseFBA.FBAModel
:ui_name fba_model_id: Metabolic Model
    :param reaction_id: Reactions to be deleted. Add multiple reactions separated by ;
:type reaction_id: kbtypes.Unicode
:ui_name reaction_id: Reaction(s) ID(s)
:param output_id: ID of model with deleted reactions
:type output_id: kbtypes.KBaseFBA.FBAModel
:ui_name output_id: Edited Model
:return: Metabolic Model Data
:rtype: kbtypes.Model
:output_widget: kbaseModelTabs
"""
meth.debug('delete reaction call')
meth.stages = 2 # for reporting progress
meth.advance("Starting...")
#grab token and workspace info, setup the client
token, ws = meth.token, meth.workspace_id;
#meth.advance("Loading the phenotype set")
#
fba = fbaModelServices(service.URLS.fba, token=token)
meth.debug(output_id)
if output_id:
params = {'model': fba_model_id,
'workspace': ws,
'reaction' : reaction_id.split(';'),
'removeReaction': 1,
'outputid': output_id }
else:
params = {'model': fba_model_id,
'workspace': ws,
'reaction' : reaction_id.split(';'),
'removeReaction': 1}
data = fba.adjust_model_reaction(params)
if output_id:
data = json.dumps({'id': output_id, 'ws': ws})
else:
data = json.dumps({'id': fba_model_id, 'ws': ws})
return data
@method(name="Build Media")
def _build_media(meth, media):
"""Assemble a set of compounds to use as a media set for performing FBA on a metabolic model. [8]
    :param media: Base media type [8.1]
    :type media: kbtypes.KBaseBiochem.Media
    :ui_name media: Media ID
:return: Metadata from new Media object
:rtype: kbtypes.KBaseBiochem.Media
:input_widget: kbaseBuildMediaInput
:output_widget: kbaseMediaViewer
:embed: True
"""
meth.stages = 3
meth.advance("Initializing")
token, workspace_id = meth.token, meth.workspace_id
fba = fbaModelServices(service.URLS.fba, token=token)
media = json.loads(media)
media['auth'] = token
media['workspace'] = workspace_id
meth.advance("Submitting Media to workspace")
media_meta = fba.addmedia(media)
meth.advance("Rendering new Media object")
fetch_media_input = {
'medias' : [media['name']],
'workspaces' : [workspace_id],
'auth' : token
}
new_media = fba.get_media(fetch_media_input)
result = {'metadata': media_meta, 'media' : new_media[0] }
return json.dumps(result)
@method(name="View Media")
def _view_media(meth, media_id):
"""Bring up a detailed view of a Media set within the narrative. [9]
:param media_id: Media type [9.1]
:type media_id: kbtypes.KBaseBiochem.Media
:ui_name media_id: Media ID
:return: A Media object
:rtype: kbtypes.KBaseBiochem.Media
:output_widget: kbaseMediaViewer
:embed: True
"""
meth.stages = 3
meth.advance("Initializing")
token, workspace_id = meth.token, meth.workspace_id
fba = fbaModelServices(service.URLS.fba, token=token)
meth.advance("Fetching Media from workspace")
fetch_media_input = {
'medias' : [media_id],
'workspaces' : [workspace_id],
'auth' : token
}
media = fba.get_media(fetch_media_input)
meth.advance("Rendering Media object")
result = {'metadata' : None, 'media' : media[0]}
return json.dumps(result)
@method(name="Run Flux Balance Analysis")
def _run_fba(meth, fba_model_id, media_id, fba_result_id, geneko, rxnko, defaultmaxflux, defaultminuptake, defaultmaxuptake, minimizeFlux, maximizeObjective, allreversible, prom):
"""Run Flux Balance Analysis on a metabolic model. [10]
:param fba_model_id: the metabolic model you wish to run [10.1]
:type fba_model_id: kbtypes.KBaseFBA.FBAModel
:ui_name fba_model_id: Metabolic Model
:param media_id: the media condition in which to run FBA (optional, default is an artificial complete media) [10.2]
:type media_id: kbtypes.KBaseBiochem.Media
:ui_name media_id: Media
:param fba_result_id: select a name for the FBA result object (optional) [10.3]
:type fba_result_id: kbtypes.KBaseFBA.FBA
:ui_name fba_result_id: Output FBA Result Name
:param geneko: specify gene knockouts by the gene's feature ID delimited by semicolons(;) (optional) [10.4]
:type geneko: kbtypes.Unicode
:ui_name geneko: Gene Knockouts
:param rxnko: specify reaction knockouts by reaction ID delimited by semicolons(;) (optional) [10.5]
:type rxnko: kbtypes.Unicode
:ui_name rxnko: Reaction Knockouts
:param defaultmaxflux: specify the default maximum intracellular flux (optional) [10.6]
:type defaultmaxflux: kbtypes.Unicode
:ui_name defaultmaxflux: Default Maximum flux
:default defaultmaxflux: 100
    :param defaultminuptake: specify the default minimum nutrient uptake flux (optional) [10.7]
:type defaultminuptake: kbtypes.Unicode
:ui_name defaultminuptake: Default Min Uptake
:default defaultminuptake: -100
:param defaultmaxuptake: specify the default maximum nutrient uptake flux (optional) [10.8]
:type defaultmaxuptake: kbtypes.Unicode
:ui_name defaultmaxuptake: Default Max Uptake
:default defaultmaxuptake: 0
:param minimizeFlux: set to 'yes' or '1' to run FBA by minimizing flux (optional) [10.9]
:type minimizeFlux: kbtypes.Unicode
:ui_name minimizeFlux: Minimize Flux?
:default minimizeFlux: no
:param maximizeObjective: set to 'no' or '0' to run FBA without maximizing the objective function (optional) [10.10]
:type maximizeObjective: kbtypes.Unicode
:ui_name maximizeObjective: Maximize Objective?
:default maximizeObjective: yes
:param allreversible: set to 'yes' or '1' to allow all model reactions to be reversible (optional) [10.11]
:type allreversible: kbtypes.Unicode
:ui_name allreversible: All rxns reversible?
:default allreversible: no
:param prom: specify the PROM constraint to apply for regulation of the metabolic model (optional) [10.12]
:type prom: kbtypes.KBaseFBA.PromConstraint
:ui_name prom: PROM constraint
:return: something
:rtype: kbtypes.Unicode
:output_widget: kbaseFbaTabsNarrative
"""
## !! Important note! the default values set here are for display only, so we actually revert to the
## default values in the FBA modeling service. Thus, if default values are updated there, the default values
## displayed to the end user will be incorrect!
meth.stages = 3
meth.advance("Setting up and validating FBA parameters")
#grab token and workspace info, setup the client
userToken, workspaceName = meth.token, meth.workspace_id;
fbaClient = fbaModelServices(service.URLS.fba, token=userToken)
# setup the parameters
"""
bool minimizeflux - a flag indicating if flux variability should be run (an optional argument: default is '0')
typedef structure {
fbamodel_id model;
workspace_id model_workspace;
FBAFormulation formulation;
bool fva;
bool simulateko;
bool minimizeflux;
bool findminmedia;
string notes;
fba_id fba;
workspace_id workspace;
string auth;
bool overwrite;
bool add_to_model;
} runfba_params;
typedef structure {
media_id media;
list<compound_id> additionalcpds;
promconstraint_id promconstraint;
workspace_id promconstraint_workspace;
workspace_id media_workspace;
float objfraction;
bool allreversible;
bool maximizeObjective;
list<term> objectiveTerms;
list<feature_id> geneko;
list<reaction_id> rxnko;
list<bound> bounds;
list<constraint> constraints;
mapping<string,float> uptakelim;
float defaultmaxflux;
float defaultminuptake;
float defaultmaxuptake;
bool simplethermoconst;
bool thermoconst;
bool nothermoerror;
bool minthermoerror;
} FBAFormulation;
"""
# handle and/or validate parameters...
if not fba_model_id:
raise Exception("Error in running FBA: model name was not specified")
if media_id:
fba_formulation = {
'media' : media_id,
'media_workspace' : workspaceName,
}
else:
fba_formulation = {}
fba_params = {
'model' : fba_model_id,
'model_workspace' : workspaceName,
'formulation' : fba_formulation,
'workspace' : workspaceName,
'notes' : "ran from the narrative"
}
fba_result_id = fba_result_id.strip()
if fba_result_id:
fba_params['fba'] = fba_result_id
if geneko:
fba_params['simulateko'] = 0
fba_params['formulation']['geneko']=geneko.split(";")
if rxnko:
fba_params['simulateko'] = 0
fba_params['formulation']['rxnko']=rxnko.split(";")
if maximizeObjective=='0' or maximizeObjective=='false' or maximizeObjective=='no':
fba_params['formulation']['maximizeObjective'] = 0
else:
fba_params['formulation']['maximizeObjective'] = 1
if minimizeFlux=='1' or minimizeFlux=='true' or minimizeFlux=='yes':
fba_params['minimizeflux'] = 1
else:
fba_params['minimizeflux'] = 0
if allreversible=='1' or allreversible=='true' or allreversible=='yes':
fba_params['formulation']['allreversible'] = 1
else:
fba_params['formulation']['allreversible'] = 0
if prom:
fba_params['formulation']['promconstraint'] = prom
fba_params['formulation']['promconstraint_workspace'] = workspaceName
if defaultmaxflux:
try:
fba_params['formulation']['defaultmaxflux'] = float(defaultmaxflux)
except:
raise Exception("Default maximum flux must be a valid number.")
else:
fba_params['formulation']['defaultmaxflux'] = 100
if defaultminuptake:
try:
fba_params['formulation']['defaultminuptake'] = float(defaultminuptake)
except:
raise Exception("Default minimum uptake must be a valid number.")
else:
fba_params['formulation']['defaultminuptake'] = -100
    if defaultmaxuptake:
try:
fba_params['formulation']['defaultmaxuptake'] = float(defaultmaxuptake)
except:
raise Exception("Default maximum uptake must be a valid number.")
else:
fba_params['formulation']['defaultmaxuptake'] = 0
meth.debug(json.dumps(fba_params))
meth.advance("Running FBA")
fbaClient = fbaModelServices(url=service.URLS.fba,token=userToken)
result_meta = fbaClient.runfba(fba_params)
generated_fba_id = result_meta[0]
#meth.advance("Retrieving FBA results")
#get_fbas_params = {
# 'fbas' : [generated_fba_id],
# 'workspaces' : [workspaceName]
#}
#fbadata = fbaClient.get_fbas(get_fbas_params)
# a hack: get object info so we can have the object name (instead of the id number)
ws = workspaceService(service.URLS.workspace, token=userToken)
meth.advance("Loading the model")
get_objects_params = [{
'ref' : workspaceName+"/"+generated_fba_id
}]
info = ws.get_object_info(get_objects_params,0)
return json.dumps({ "ids":[info[0][1]],"workspaces":[workspaceName] })
@method(name="View FBA Result Details")
def _view_fba_result_details(meth, fba_id):
"""Bring up a detailed view of your FBA result within the narrative. [11]
:param fba_id: the FBA Result to view [11.1]
:type fba_id: kbtypes.KBaseFBA.FBA
:ui_name fba_id: FBA Result
:return: something
:rtype: kbtypes.Unicode
:output_widget: kbaseFbaTabsNarrative
"""
meth.stages = 2 # for reporting progress
meth.advance("Starting...")
#grab token and workspace info, setup the client
token, workspaceName = meth.token, meth.workspace_id;
#fbaClient = fbaModelServices(service.URLS.fba)
meth.advance("Retrieving FBA results")
#get_fbas_params = {
# 'fbas' : [fba_id],
# 'workspaces' : [workspaceName],
# 'auth' : token
#}
#fbadata = fbaClient.get_fbas(get_fbas_params)
return json.dumps({ "ids":[fba_id],"workspaces":[workspaceName] })
@method(name="Compare FBA Results")
def _compare_fbas(meth, fba_id1, fba_id2):
"""Compare two FBA results, showing differences in fluxes for reactions.
:param fba_id1: First FBA result
:type fba_id1: kbtypes.KBaseFBA.FBA
:ui_name fba_id1: First FBA result
:param fba_id2: Second FBA result
:type fba_id2: kbtypes.KBaseFBA.FBA
:ui_name fba_id2: Second FBA result
:return: FBA Result Comparison Data
:rtype: kbtypes.Unicode
:output_widget: kbaseCompareFBAs
"""
meth.stages = 2 # for reporting progress
meth.advance("Starting...")
return json.dumps({'ids': [fba_id1, fba_id2],"ws": meth.workspace_id})
@method(name="Gapfill a Metabolic Model")
def _gapfill_fba(meth, fba_model_id, media_id,source_model_id,int_sol, output_model_id):
"""Run Gapfilling on an metabolic model. Gapfill attempts to identify the minimal number of reactions
needed to add to your metabolic model in order for the model to predict growth in the
given media condition (or in complete media if no Media is provided). Gapfilling is
an optimization procedure that can produce many possible solutions. After a gapfilling
job is submitted and run, you can view the results by viewing a metabolic model details,
and incorporate the new reactions by running the Integrate Gapfill Solution function. [12]
:param fba_model_id: the metabolic model to gapfill [12.1]
:type fba_model_id: kbtypes.KBaseFBA.FBAModel
:ui_name fba_model_id: Metabolic Model
:param media_id: the media condition in which to gapfill [12.2]
:type media_id: kbtypes.KBaseBiochem.Media
:ui_name media_id: Media
:param source_model_id: model to gapfill from
:type source_model_id: kbtypes.KBaseFBA.FBAModel
:ui_name source_model_id: Source Gapfill Model
:param int_sol: automatically integrate solution (yes/no)
:type int_sol: kbtypes.Unicode
:ui_name int_sol: Integrate Solution
:param output_model_id: select a name for the model result object (optional)
:type output_model_id: kbtypes.Unicode
:ui_name output_model_id: Output Model ID
:return: Metabolic Model Data
:rtype: kbtypes.Model
:output_widget: kbaseModelTabs
"""
# setting the output id appears to not work, so for now we leave it out
meth.stages = 2
meth.advance("Running gapfill on model...")
#grab token and workspace info, setup the client
userToken, workspaceName = meth.token, meth.workspace_id;
fbaclient = fbaModelServices(url=service.URLS.fba, token=userToken)
fba_formulation = {}
if (media_id):
fba_formulation = {
'media' : media_id,
'media_workspace' : workspaceName
}
gapfill_formulation = {
'formulation' : fba_formulation,
}
gapfill_params = {
'model' : fba_model_id,
'model_workspace' : workspaceName,
'formulation' : gapfill_formulation,
'workspace' : workspaceName,
'fastgapfill' : 1,
}
if(int_sol):
gapfill_params['integrate_solution'] = int_sol;
if (gapfill_params['integrate_solution'] == 'yes'):
gapfill_params['integrate_solution'] = 1;
if(output_model_id):
gapfill_params['out_model'] = output_model_id;
if(source_model_id):
gapfill_params['source_model'] = source_model_id;
gapfill_params['source_model_ws'] = workspaceName;
output = fbaclient.gapfill_model(gapfill_params);
if output_model_id:
data = json.dumps({'id': output_model_id, 'ws': workspaceName})
else:
data = json.dumps({'id': fba_model_id, 'ws': workspaceName})
return data
@method(name="Integrate Gapfill Solution")
def _integrate_gapfill(meth, fba_model_id, gapfill_id, output_model_id):
"""Integrate a Gapfill solution into your metabolic model [13]
:param fba_model_id: the metabolic model to integrate gapfill solutions into [13.1]
:type fba_model_id: kbtypes.KBaseFBA.FBAModel
:ui_name fba_model_id: Metabolic Model
:param gapfill_id: select the ID of the gapfill solution (found in the Gapfilling tab in the model viewer, usually in the form 'modelId.gf.2.gfsol.1') [13.2]
:type gapfill_id: kbtypes.KBaseFBA.Gapfilling
:ui_name gapfill_id: Gapfill ID
    :default gapfill_id: e.g. model.gf.2.gfsol.1
:param output_model_id: select a name for the gapfilled object (optional) [13.3]
:type output_model_id: kbtypes.KBaseFBA.FBAModel
:ui_name output_model_id: Output Model Result Name
:output_widget: kbaseIntegrateGapfillOutput
:return: gapfilled model ID
:rtype: kbtypes.Unicode
"""
meth.stages = 2
meth.advance("Setting up parameters")
#grab token and workspace info, setup the client
userToken, workspaceName = meth.token, meth.workspace_id;
fbaClient = fbaModelServices(service.URLS.fba, token=userToken)
"""
typedef structure {
fbamodel_id model;
workspace_id model_workspace;
list<gapfillsolution_id> gapfillSolutions;
list<gapgensolution_id> gapgenSolutions;
fbamodel_id out_model;
workspace_id workspace;
string auth;
bool overwrite;
} integrate_reconciliation_solutions_params;
"""
integrate_params = {
'model' : fba_model_id,
'model_workspace' : workspaceName,
'gapfillSolutions' : [gapfill_id],
'gapgenSolutions' : [],
'workspace' : workspaceName
}
# get the model to determine the number of reactions
wsClient = workspaceService(service.URLS.workspace, token=userToken)
firstReactionList = wsClient.get_object_subset([{'ref':workspaceName+"/"+fba_model_id, 'included':["/modelreactions/[*]/id"]}])
#meth.debug(json.dumps(firstReactionList));
output_model_id = output_model_id.strip()
if (output_model_id):
integrate_params['out_model'] = output_model_id
else:
output_model_id = fba_model_id
# funcdef integrate_reconciliation_solutions(integrate_reconciliation_solutions_params input) returns (object_metadata modelMeta);
meth.advance("Integrating the gapfill solution")
model_meta = fbaClient.integrate_reconciliation_solutions(integrate_params)
finalReactionList = wsClient.get_object_subset([{'ref':workspaceName+"/"+output_model_id, 'included':["/modelreactions/[*]/id"]}])
return json.dumps( {
"workspaceName":workspaceName,
"originalModel":fba_model_id,
"originalModelRef":str(firstReactionList[0]['info'][6])+"/"+str(firstReactionList[0]['info'][0])+"/"+str(firstReactionList[0]['info'][4]),
"startingNumRxns":len(firstReactionList[0]['data']['modelreactions']),
"newModel":output_model_id,
"newModelRef":str(finalReactionList[0]['info'][6])+"/"+str(finalReactionList[0]['info'][0])+"/"+str(finalReactionList[0]['info'][4]),
"endingNumRxns":len(finalReactionList[0]['data']['modelreactions'])
})
#@method(name="Upload Phenotype Data")
def _upload_phenotype(meth, genome_id, phenotype_id):
"""Upload phenotype data for FBA analysis [14]
:param genome_id: a genome id [14.1]
:type genome_id: kbtypes.KBaseGenomes.Genome
:ui_name genome_id: Genome ID
:param phenotype_id: a phenotype ID [14.2]
:type phenotype_id: kbtypes.KBasePhenotypes.PhenotypeSet
:ui_name phenotype_id: Phenotype Dataset ID
:return: something
:rtype: kbtypes.Unicode
:output_widget: PhenotypeUploader
"""
if not phenotype_id:
phenotype_id = "phenotype_" + ''.join([chr(random.randrange(0, 26) + ord('A')) for _ in xrange(8)])
token = os.environ['KB_AUTH_TOKEN']
workspace = os.environ['KB_WORKSPACE_ID']
return json.dumps({'ws_name': workspace, 'genome_id': genome_id, 'phenotype_id': phenotype_id})
#@method(name="Reconcile Phenotype Data")
def _reconcile_phenotype(meth, fba_model_id, phenotype_id, out_model_id):
"""Run Gapfilling on an FBA Model [16]
:param fba_model_id: an FBA model id [16.1]
:type fba_model_id: kbtypes.KBaseFBA.FBAModel
:ui_name fba_model_id: FBA Model ID
:param phenotype_id: a phenotype simulation ID [16.2]
:type phenotype_id: kbtypes.KBasePhenotypes.PhenotypeSimulationSet
:ui_name phenotype_id: Phenotype Simulation Dataset ID
:param out_model_id: a name for the generated FBA Model (optional) [16.3]
:type out_model_id: kbtypes.KBaseFBA.FBAModel
:ui_name out_model_id: Output FBA Model Name
:return: something
:rtype: kbtypes.Unicode
:output_widget: kbaseModelMetaNarrative
"""
if not out_model_id:
out_model_id = "model_" + ''.join([chr(random.randrange(0, 26) + ord('A')) for _ in xrange(8)])
token = os.environ['KB_AUTH_TOKEN']
workspace = os.environ['KB_WORKSPACE_ID']
fbaClient = fbaModelServices(service.URLS.fba)
wildtype_phenotype_reconciliation_params = {
'auth': token,
'model_workspace': workspace,
'model': fba_model_id,
'phenotypeSet_workspace': workspace,
'phenotypeSet': phenotype_id,
'workspace': workspace,
'out_model': out_model_id,
}
job_id = fbaClient.queue_wildtype_phenotype_reconciliation(wildtype_phenotype_reconciliation_params)['id']
return json.dumps({'ws_name': workspace, 'model_id': out_model_id, 'job_id': job_id})
@method(name="Compare Two Metabolic Models")
def _compare_fba_models(meth, fba_model1, fba_model2, proteome_cmp):
""" Compare genes mapped to the same reactions from two metabolic models according to
the comparison result between proteomes. See also the function 'Compare Two Proteomes'. [19]
:param fba_model1: an FBA model id from first genome [19.1]
:type fba_model1: kbtypes.KBaseFBA.FBAModel
:ui_name fba_model1: FBA Model 1 ID
:param fba_model2: an FBA model id from second genome [19.2]
:type fba_model2: kbtypes.KBaseFBA.FBAModel
:ui_name fba_model2: FBA Model 2 ID
:param proteome_cmp: Proteome comparison ID [19.3]
:type proteome_cmp: kbtypes.GenomeComparison.ProteomeComparison
:ui_name proteome_cmp: Proteome Comparison ID
:return: Output Comparison Result
:rtype: kbtypes.Unicode
:output_widget: FbaModelComparisonWidget
"""
meth.stages = 1 # for reporting progress
token = os.environ['KB_AUTH_TOKEN']
workspace = os.environ['KB_WORKSPACE_ID']
#fbaClient = fbaModelServices(url = service.URLS.fba, token = token)
#get_models_params = {
# 'models' : [fba_model1, fba_model2],
# 'workspaces' : [workspace, workspace],
# 'auth' : token
# }
#modeldata = fbaClient.get_models(get_models_params)
#model1 = modeldata[0]
#model2 = modeldata[1]
return json.dumps({'ws_name': workspace, 'fba_model1_id': fba_model1, 'fba_model2_id': fba_model2, 'proteome_cmp': proteome_cmp})
@method(name="Build a PROM constraint")
def _build_promconstraint(meth, genome_id, series_id, regulome_id):
"""Given a gene expression series and a regulome, build a PROM constraint for FBA. [24]
:param genome_id: Genome ID [24.1]
:type genome_id: kbtypes.KBaseGenomes.Genome
:ui_name genome_id: Genome Name
:param series_id: Gene Expression Series ID [24.2]
:type series_id: kbtypes.KBaseExpression.ExpressionSeries
:ui_name series_id: Gene Expression Series Name
:param regulome_id: Regulome ID [24.3]
:type regulome_id: kbtypes.KBaseRegulation.Regulome
:ui_name regulome_id: Regulome Name
:return: Generated PROM constraint ID
:rtype: kbtypes.KBaseFBA.PromConstraint
:output_widget: kbasePromConstraint
"""
meth.stages = 2 # for reporting progress
meth.advance("Starting")
meth.advance("Building your new PROM constraint")
#grab token and workspace info, setup the client
userToken, workspaceName = meth.token, meth.workspace_id
fbaClient = fbaModelServices(url=service.URLS.fba,token=userToken)
# create the model object
build_pc_params = {
'genome_id': genome_id,
'series_id': series_id,
'regulome_id': regulome_id,
'workspace': workspaceName
}
fba_meta_data = fbaClient.create_promconstraint(build_pc_params)
wsobj_id = fba_meta_data[0]
name = fba_meta_data[1]
return json.dumps({'name': name, 'ws': workspaceName})
@method(name="Generate SBML file")
def _generate_sbml_file(meth, model_id):
    """Generates an SBML file for the specified model.
    :param model_id: an FBA model id
    :type model_id: kbtypes.KBaseFBA.FBAModel
    :ui_name model_id: FBA Model ID
    :return: SBML File Download
:rtype: kbtypes.Unicode
:output_widget: DownloadFileWidget
"""
meth.stages = 2 # for reporting progress
meth.advance("Building model SBML file...")
#grab token and workspace info, setup the client
userToken, workspaceName = meth.token, meth.workspace_id
fbaClient = fbaModelServices(url=service.URLS.fba,token=userToken)
# create the model object
export_model_params = {
'model': model_id,
'workspace': workspaceName,
'format': "sbml"
}
sbmlfile = fbaClient.export_fbamodel(export_model_params)
return json.dumps({'data': sbmlfile, 'name': model_id+'.sbml'})
#
#@method(name="Edit Data")
#def _edit_data(meth, obj_name, type):
# """Edit data in your workspace.
# :param object_name: name of the data object
# :type object_id: kbtypes.WorkspaceObjectId
# :ui_name object_id: Data Name
# :param type: type of the data object
# :type type: kbtypes.Unicode
# :ui_name type: Data Type
# :return: something
# :rtype: kbtypes.Unicode
# """
#
#
# """
# :output_widget: kbaseFbaResultViewer
# """
#
# meth.stages = 3
# meth.advance("Setting up FBA parameters")
#
# #grab token and workspace info, setup the client
# token, workspaceName = meth.token, meth.workspace_id;
#
# wsClient = workspaceService(service.URLS.workspace)
# get_obj_params = {
# 'auth' : token,
# ''
# }
# objData = wsClient.get_object();
#
# return json.dumps({ "obj":objData })
#
#
# Finalize (registers service)
finalize_service()
| {
"content_hash": "574387340c18aa03917add4c3482cbe5",
"timestamp": "",
"source": "github",
"line_count": 1055,
"max_line_length": 179,
"avg_line_length": 36.55260663507109,
"alnum_prop": 0.6649638254285196,
"repo_name": "nlharris/narrative",
"id": "510f4315369e20ef847a1414caa92bfa2304958c",
"size": "38563",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "src/biokbase/narrative/services/microbes_metabolic_modeling.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "704233"
},
{
"name": "HTML",
"bytes": "361317"
},
{
"name": "JavaScript",
"bytes": "11750474"
},
{
"name": "Lua",
"bytes": "84821"
},
{
"name": "Makefile",
"bytes": "18652"
},
{
"name": "PHP",
"bytes": "1691"
},
{
"name": "Perl",
"bytes": "2313"
},
{
"name": "Python",
"bytes": "3819944"
},
{
"name": "R",
"bytes": "39956"
},
{
"name": "Shell",
"bytes": "27614"
}
],
"symlink_target": ""
} |
"""Tests for tensorflow.ops.gradients."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import warnings
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import function as framework_function
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_ops
from tensorflow.python.framework import test_util
from tensorflow.python.framework.constant_op import constant
from tensorflow.python.layers import core as core_layers
from tensorflow.python.ops import array_grad # pylint: disable=unused-import
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_grad # pylint: disable=unused-import
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import custom_gradient
from tensorflow.python.ops import data_flow_grad # pylint: disable=unused-import
from tensorflow.python.ops import data_flow_ops # pylint: disable=unused-import
from tensorflow.python.ops import functional_ops # pylint: disable=unused-import
from tensorflow.python.ops import gradients
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import list_ops
from tensorflow.python.ops import math_grad # pylint: disable=unused-import
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_grad # pylint: disable=unused-import
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_grad # pylint: disable=unused-import
from tensorflow.python.ops import tensor_array_grad # pylint: disable=unused-import
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.ops.nn_ops import bias_add
from tensorflow.python.platform import googletest
class GradientsTest(test_util.TensorFlowTestCase):
def testGradients(self):
with ops.Graph().as_default():
inp = constant(1.0, shape=[32, 100], name="in")
w = constant(1.0, shape=[100, 10], name="w")
b = constant(1.0, shape=[10], name="b")
xw = math_ops.matmul(inp, w, name="xw")
h = bias_add(xw, b, name="h")
w_grad = gradients.gradients(h, w)[0]
self.assertEquals("MatMul", w_grad.op.type)
self.assertEquals(w_grad.op._original_op, xw.op)
self.assertTrue(w_grad.op.get_attr("transpose_a"))
self.assertFalse(w_grad.op.get_attr("transpose_b"))
def testUnusedOutput(self):
with ops.Graph().as_default():
w = constant(1.0, shape=[2, 2])
x = constant(1.0, shape=[2, 2])
wx = math_ops.matmul(w, x)
split_wx = array_ops.split(value=wx, num_or_size_splits=2, axis=0)
c = math_ops.reduce_sum(split_wx[1])
gw = gradients.gradients(c, [w])[0]
self.assertEquals("MatMul", gw.op.type)
def testColocateGradients(self):
with ops.Graph().as_default() as g:
w = constant(1.0, shape=[1, 1])
x = constant(1.0, shape=[1, 2])
with g.device("/device:GPU:0"):
wx = math_ops.matmul(w, x)
gw = gradients.gradients(wx, [w], colocate_gradients_with_ops=True)[0]
self.assertEqual(gw.op.colocation_groups(), wx.op.colocation_groups())
def testColocateGradientsWithAggregation(self):
with ops.Graph().as_default() as g:
with g.device("/device:GPU:1"):
w = constant(1.0, shape=[1, 1])
x = constant(1.0, shape=[1, 2])
y = constant(1.0, shape=[1, 2])
wx = math_ops.matmul(w, x)
wy = math_ops.matmul(w, y)
with g.device("/device:GPU:0"):
z = wx + wy
gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
self.assertEqual(gw1.op.colocation_groups(), wx.op.colocation_groups())
gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
self.assertTrue(wx.op.colocation_groups() != gw2.op.colocation_groups())
def testColocateGradientsWithAggregationInMultipleDevices(self):
with ops.Graph().as_default() as g:
with g.device("/device:GPU:1"):
w = constant(1.0, shape=[1, 1])
x = constant(1.0, shape=[1, 2])
y = constant(1.0, shape=[1, 2])
with g.device("/task:1"):
wx = math_ops.matmul(w, x)
with g.device("/task:2"):
wy = math_ops.matmul(w, y)
with g.device("/device:GPU:0"):
z = wx + wy
gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
self.assertEqual(gw1.op.colocation_groups(), w.op.colocation_groups())
gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
self.assertTrue(w.op.colocation_groups() != gw2.op.colocation_groups())
def testColocateGradientsWithGateGradients(self):
if not test_util.is_gpu_available():
self.skipTest("No GPU available")
with ops.Graph().as_default() as g:
with g.device("/device:CPU:0"):
x = constant(1.0, shape=[1, 1])
y = constant(1.0, shape=[1, 1])
s = x + y
with g.device("/device:GPU:0"):
z = math_ops.reduce_sum(s)
gz_x = gradients.gradients(z, [x], colocate_gradients_with_ops=True,
gate_gradients=True)[0]
with session.Session():
# Make sure the placer doesn't complain.
self.evaluate(gz_x)
def testBoundaryStop(self):
# Test that we don't differentiate 'x'. The gradient function for 'x' is
# set explicitly to None so we will get an exception if the gradient code
# tries to differentiate 'x'.
with ops.Graph().as_default():
c = constant(1.0)
x = array_ops.identity(c)
y = x + 1.0
z = y + 1
grads = gradients.gradients(z, [x])
self.assertTrue(all(x is not None for x in grads))
@test_util.run_v1_only("b/120545219")
def testBoundaryContinue(self):
# Test that we differentiate both 'x' and 'y' correctly when x is a
# predecessor of y.
with self.cached_session():
x = constant(1.0)
y = x * 2.0
z = y * 3.0
grads = gradients.gradients(z, [x, y])
self.assertTrue(all(x is not None for x in grads))
self.assertEqual(6.0, grads[0].eval())
@test_util.run_v1_only("b/120545219")
def testAggregationMethodAccumulateN(self):
with self.cached_session():
x = constant(1.0)
y = x * 2.0
z = y + y + y + y + y + y + y + y + y + y
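# z sums y ten times and y = 2 * x, so dz/dy = 10 and dz/dx = 20 (the values asserted below).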
grads = gradients.gradients(
z, [x, y],
aggregation_method=gradients.AggregationMethod.
EXPERIMENTAL_ACCUMULATE_N)
self.assertTrue(all(x is not None for x in grads))
self.assertEqual(20.0, grads[0].eval())
self.assertEqual(10.0, grads[1].eval())
@test_util.run_v1_only("b/120545219")
def testAggregationMethodAddN(self):
with self.cached_session():
x = constant(1.0)
y = x * 2.0
z = y + y + y + y + y + y + y + y + y + y
grads = gradients.gradients(
z, [x, y], aggregation_method=gradients.AggregationMethod.ADD_N)
self.assertTrue(all(x is not None for x in grads))
self.assertEqual(20.0, grads[0].eval())
self.assertEqual(10.0, grads[1].eval())
@test_util.run_v1_only("b/120545219")
def testAggregationMethodTree(self):
with self.cached_session():
x = constant(1.0)
y = x * 2.0
z = y + y + y + y + y + y + y + y + y + y
grads = gradients.gradients(
z, [x, y],
aggregation_method=gradients.AggregationMethod.EXPERIMENTAL_TREE)
self.assertTrue(all(x is not None for x in grads))
self.assertEqual(20.0, grads[0].eval())
self.assertEqual(10.0, grads[1].eval())
def testNoGradientForStringOutputs(self):
with ops.Graph().as_default():
def _TestOpGrad(_, float_grad, string_grad):
"""Gradient function for TestStringOutput."""
self.assertEquals(float_grad.dtype, dtypes.float32)
self.assertFalse(string_grad)
return float_grad
ops.RegisterGradient("TestStringOutput")(_TestOpGrad)
c = constant(1.0)
x, _ = test_ops.test_string_output(c)
z = x * 2.0
w = z * 3.0
grads = gradients.gradients(z, [c])
self.assertTrue(isinstance(grads[0], ops.Tensor))
grads = gradients.gradients(w, [c])
self.assertTrue(isinstance(grads[0], ops.Tensor))
def testSingletonIndexedSlices(self):
with ops.Graph().as_default():
x = array_ops.placeholder(dtypes.float32)
y = array_ops.identity(x)
dy = ops.IndexedSlices(
array_ops.placeholder(dtypes.float32),
array_ops.placeholder(dtypes.int32))
dx, = gradients.gradients(y, x, grad_ys=dy)
# The IndexedSlices gradient of tf.identity is the identity map.
with self.cached_session() as sess:
vdx, vdy = sess.run(
[dx, dy], feed_dict={x: [1.0], dy.indices: [0], dy.values: [2.0]})
self.assertEqual(vdx, vdy)
@test_util.run_v1_only("b/120545219")
def testNonDifferentiableSwitchInWhileLoop(self):
with ops.Graph().as_default():
v = array_ops.placeholder(dtypes.float32, [])
def _Step(i, a, ta):
a += math_ops.cast(v, dtypes.int32)
return (i + 1, a, ta.write(i, a))
n = 4
i, _, ta = control_flow_ops.while_loop(
lambda i, *_: i < n,
_Step, [0, 0, tensor_array_ops.TensorArray(
dtypes.int32, size=n)])
target = ta.read(i - 1)
grad, = gradients.gradients(target, v)
self.assertIsNone(grad)
def testVariableReadValueGradient(self):
with ops.Graph().as_default():
init = constant_op.constant(100.0)
var = variables.Variable(init)
gradient = gradients.gradients(var.read_value(), var)
self.assertIsNotNone(gradient)
def testVariableAsGraphElementGradient(self):
with ops.Graph().as_default() as graph:
init = constant_op.constant(100.0)
var = variables.Variable(init)
gradient = gradients.gradients(graph.as_graph_element(var), var)
self.assertIsNotNone(gradient)
@test_util.run_v1_only("b/120545219")
def testVariableRefGradient(self):
with ops.Graph().as_default():
init = constant_op.constant(100.0)
var = variables.VariableV1(init)
gradient = gradients.gradients(var._ref(), var)
self.assertIsNotNone(gradient)
@test_util.run_v1_only("b/120545219")
def testDependentYs(self):
with self.cached_session():
x = constant_op.constant(3.0)
y = math_ops.square(x)
y1 = math_ops.square(y)
y2 = math_ops.square(y1)
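# With x = 3: y = x**2, y1 = y**2, y2 = y1**2 = x**8, so
# d(y + y2)/dx = 2*x + 8*x**7 = 6 + 17496 = 17502 (asserted below).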
g = gradients.gradients([y, y2], x)
self.assertAllClose(17502.0, g[0].eval())
g = gradients.gradients(y + y2, x)
self.assertAllClose(17502.0, g[0].eval())
z = array_ops.identity(y)
z2 = array_ops.identity(y2)
g = gradients.gradients([z, z2], x)
self.assertAllClose(17502.0, g[0].eval())
@test_util.run_v1_only("b/120545219")
def testPartialDerivatives(self):
with self.cached_session():
x = constant_op.constant(1.)
y = 2 * x
z = x + y
totalg = gradients.gradients(z, [x, y])
self.assertEqual([3.0, 1.0], [g.eval() for g in totalg])
partialg = gradients.gradients(z, [x, y], stop_gradients=[x, y])
self.assertEqual([1.0, 1.0], [g.eval() for g in partialg])
@test_util.run_v1_only("b/120545219")
def testStopGradients(self):
def _MakeGraph(rng, stop_gradients=()):
def _FunctionOf(xs, k=3):
return ops.convert_to_tensor(
sum(math_ops.matmul(rng.rand(k, k), x) for x in xs)
+ rng.rand(k, k))
a = _FunctionOf([])
if "a" in stop_gradients: a = array_ops.stop_gradient(a)
b = _FunctionOf([a])
if "b" in stop_gradients: b = array_ops.stop_gradient(b)
c = _FunctionOf([a, b])
if "c" in stop_gradients: c = array_ops.stop_gradient(c)
d = _FunctionOf([b, c])
if "d" in stop_gradients: d = array_ops.stop_gradient(d)
return dict(a=a, b=b, c=c, d=d)
def _Gradients(ys, xs, **kwargs):
dydxs = gradients.gradients(ys, xs, **kwargs)
dydxs = [0. * x if dydx is None else dydx
for x, dydx in zip(xs, dydxs)]
return dydxs
seed = np.random.randint(1000)
cases = []
subsets = [""] + "a b c d ab ac ad bc bd cd abc abd acd bcd abcd".split()
graph = _MakeGraph(np.random.RandomState(seed))
for constants in subsets:
graph_with_stops = _MakeGraph(np.random.RandomState(seed), constants)
for variables_ in subsets:
# compute the gradient when stopped using tf.stop_gradients
grad1 = _Gradients([graph_with_stops["d"]],
[graph_with_stops[v] for v in variables_])
# compute the gradient when stopped using the stop_gradients kwarg
grad2 = _Gradients([graph["d"]],
[graph[v] for v in variables_],
stop_gradients=[graph[v] for v in constants])
cases.append(dict(grad1=grad1, grad2=grad2,
constants=constants, variables=variables_))
# evaluate all tensors in one call to session.run for speed
with self.cached_session() as sess:
results = sess.run([(case["grad1"], case["grad2"]) for case in cases])
for (npgrad1, npgrad2), case in zip(results, cases):
for a, b in zip(npgrad1, npgrad2):
np.testing.assert_allclose(a, b)
def testUnconnectedGradientsNoneUnconnectedGradients(self):
with ops.Graph().as_default():
x = constant(1.0, shape=[2, 2])
y = constant(3.0, shape=[3, 1])
grad = gradients.gradients(
[y], [x], unconnected_gradients="none")
self.assertIsNone(grad[0])
def testUnconnectedGradientsZerosUnconnectedGradients(self):
with ops.Graph().as_default():
x = constant(1.0, shape=[2, 2])
y = constant(3.0, shape=[3, 1])
grads = gradients.gradients(
[y], [x], unconnected_gradients="zero")
with self.cached_session() as sess:
self.assertAllEqual([[0.0, 0.0], [0.0, 0.0]], self.evaluate(grads)[0])
def testUnconnectedGradientsZeroConnectedGradients(self):
with ops.Graph().as_default():
x = constant(1.0)
y = x * 3.0
grad = gradients.gradients(
[y], [x], unconnected_gradients="zero")
with self.cached_session() as sess:
self.assertEquals(3.0, self.evaluate(grad)[0])
def testUnknownUnconnectedGradientsValueGiven(self):
with ops.Graph().as_default():
x = constant(1.0)
y = constant(1.0)
with self.assertRaisesRegexp(
ValueError, "Unknown value for unconnected_gradients: 'nonsense'"):
gradients.gradients([y], [x], unconnected_gradients="nonsense")
class FunctionGradientsTest(test_util.TensorFlowTestCase):
@classmethod
def XSquarePlusB(cls, x, b):
return x * x + b
@classmethod
def XSquarePlusBGradient(cls, x, b, g):
# Perturb gradients (multiply by 2), so we can test that this was called.
g *= 2.0
return g * 2.0 * x, g
@classmethod
def _PythonGradient(cls, op, grad):
# Perturb gradients (multiply by 3), so we can test that this was called.
grad *= 3.0
return grad * op.inputs[0] * 2.0, grad
@classmethod
def _GetFunc(cls, **kwargs):
return framework_function.Defun(dtypes.float32, dtypes.float32, **
kwargs)(cls.XSquarePlusB)
def _GetFuncGradients(self, f, x_value, b_value):
x = constant_op.constant(x_value, name="x")
b = constant_op.constant(b_value, name="b")
y = f(x, b)
grads = gradients.gradients(y, [x, b])
with self.cached_session() as sess:
return sess.run(grads)
def testFunctionGradientsBasic(self):
g = ops.Graph()
with g.as_default():
f = self._GetFunc()
# Get gradients (should add SymbolicGradient node for function).
grads = self._GetFuncGradients(f, [2.0], [1.0])
self.assertAllEqual([4.0], grads[0])
self.assertAllEqual([1.0], grads[1])
def testFunctionGradientsComposition(self):
with ops.Graph().as_default():
f = self._GetFunc()
x = constant_op.constant([2.0], name="x")
b1 = constant_op.constant([1.0], name="b1")
b2 = constant_op.constant([1.0], name="b2")
y = f(f(x, b1), b2)
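# f(x, b) = x*x + b, so y = (x**2 + b1)**2 + b2; at x = 2, b1 = 1:
# dy/dx = 2*(x**2 + b1)*2*x = 40 and dy/db1 = 2*(x**2 + b1) = 10.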
# Build gradient graph (should add SymbolicGradient node for function).
grads = gradients.gradients(y, [x, b1])
with self.cached_session() as sess:
self.assertAllEqual([40.0], self.evaluate(grads)[0])
self.assertAllEqual([10.0], self.evaluate(grads)[1])
def testFunctionGradientsWithGradFunc(self):
g = ops.Graph()
with g.as_default():
grad_func = framework_function.Defun(dtypes.float32, dtypes.float32,
dtypes.float32)(
self.XSquarePlusBGradient)
f = self._GetFunc(grad_func=grad_func)
# Get gradients (should add SymbolicGradient node for function, which
# uses the grad_func above, which multiplies all gradients by 2).
grads = self._GetFuncGradients(f, [2.0], [1.0])
self.assertAllEqual([4.0 * 2], grads[0])
self.assertAllEqual([1.0 * 2], grads[1])
def testFunctionGradientWithRegistration(self):
g = ops.Graph()
with g.as_default():
f = self._GetFunc(python_grad_func=self._PythonGradient)
# Get gradients, using the python gradient function. It multiplies the
# gradients by 3.
grads = self._GetFuncGradients(f, [2.0], [1.0])
self.assertAllEqual([4.0 * 3], grads[0])
self.assertAllEqual([1.0 * 3], grads[1])
def testFunctionGradientWithGradFuncAndRegistration(self):
g = ops.Graph()
with g.as_default():
grad_func = framework_function.Defun(dtypes.float32, dtypes.float32,
dtypes.float32)(
self.XSquarePlusBGradient)
with self.assertRaisesRegexp(ValueError, "Gradient defined twice"):
f = self._GetFunc(
grad_func=grad_func, python_grad_func=self._PythonGradient)
f.add_to_graph(ops.Graph())
def testGradientWrtCaptured(self):
with ops.Graph().as_default():
x = constant_op.constant(1.0, name="x")
@function.defun()
def Foo():
y = math_ops.multiply(x, 2.0, name="y")
g = gradients_impl.gradients(y, x)
return g[0]
f = Foo()
with self.cached_session() as sess:
self.assertEqual(self.evaluate(f), 2.0)
def testGradientOfCaptured(self):
with ops.Graph().as_default():
x = constant_op.constant(1.0, name="x")
y = math_ops.multiply(x, 2.0, name="y")
@framework_function.Defun()
def Foo():
g = gradients_impl.gradients(y, x)
return g[0]
f = Foo()
with self.cached_session() as sess:
self.assertEqual(self.evaluate(f), 2.0)
def testCapturedResourceVariable(self):
with ops.Graph().as_default():
var = resource_variable_ops.ResourceVariable(1.0, name="var")
@function.defun()
def Foo():
y = math_ops.multiply(var, 2.0, name="y")
g = gradients_impl.gradients(y, var)
return g[0]
f = Foo()
with self.cached_session() as sess:
self.evaluate(variables.global_variables_initializer())
self.assertEqual(self.evaluate(f), 2.0)
def testCapturedNested(self):
with ops.Graph().as_default():
x1 = constant_op.constant(1.0, name="x1")
x2 = constant_op.constant(2.0, name="x2")
x3 = math_ops.multiply(x1, x2, name="x3")
@function.defun()
def Outer():
outer1 = array_ops.identity(x1, name="outer1")
@function.defun()
def Inner():
inner1 = array_ops.identity(outer1, name="inner1")
inner2 = array_ops.identity(x2, name="inner2")
inner3 = array_ops.identity(x3, name="inner3")
return gradients_impl.gradients([inner1, inner2, inner3, x1],
[x1, x2])
return Inner()
x1_grad, x2_grad = Outer()
with self.cached_session() as sess:
# 1.0 + None + 2.0 + 1.0 = 4.0
self.assertEqual(self.evaluate(x1_grad), 4.0)
# None + 1.0 + 1.0 + None = 2.0
self.assertEqual(self.evaluate(x2_grad), 2.0)
def testCapturedFromFunction(self):
with ops.Graph().as_default():
x = constant_op.constant(1.0, name="x")
@function.defun()
def Outer():
y = math_ops.multiply(x, 2.0, name="y")
@function.defun()
def Inner():
z = math_ops.multiply(y, 3.0, name="z")
g = gradients_impl.gradients(z, y)
return g[0]
return Inner()
z_grad = Outer()
with self.cached_session() as sess:
self.assertEqual(self.evaluate(z_grad), 3.0)
def testCapturedEagerTensors(self):
# Test that we can handle captured eager tensors unrelated to the gradient
# computation (i.e. we need to ignore them).
# TODO(skyewm): make it an error if you try to take the gradient wrt a
# captured EagerTensor
with context.eager_mode():
c = constant_op.constant(2.0, name="c")
@function.defun
def Foo():
x = constant_op.constant(10.0, name="x")
y = math_ops.multiply(x, c, name="y")
z = math_ops.multiply(y, 3.0, name="z")
g = gradients_impl.gradients(z, x)
return g[0]
self.assertEqual(Foo().numpy(), 6.0)
class StopGradientTest(test_util.TensorFlowTestCase):
def testStopGradient(self):
with ops.Graph().as_default():
inp = constant(1.0, shape=[100, 32], name="in")
out = array_ops.stop_gradient(inp)
igrad = gradients.gradients(out, inp)[0]
assert igrad is None
class PreventGradientTest(test_util.TensorFlowTestCase):
def testPreventGradient(self):
with ops.Graph().as_default():
inp = constant(1.0, shape=[100, 32], name="in")
out = array_ops.prevent_gradient(inp)
with self.assertRaisesRegexp(LookupError, "explicitly disabled"):
_ = gradients.gradients(out, inp)
class HessianVectorProductTest(test_util.TensorFlowTestCase):
@test_util.run_v1_only("b/120545219")
def testHessianVectorProduct(self):
# Manually compute the Hessian explicitly for a low-dimensional problem
# and check that HessianVectorProduct matches multiplication by the
# explicit Hessian.
# Specifically, the Hessian of f(x) = x^T A x is
# H = A + A^T.
# We expect HessianVectorProduct(f(x), x, v) to be H v.
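# (Check: grad f(x) = (A + A^T) x, so the Hessian is indeed A + A^T.)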
m = 4
rng = np.random.RandomState([1, 2, 3])
mat_value = rng.randn(m, m).astype("float32")
v_value = rng.randn(m, 1).astype("float32")
x_value = rng.randn(m, 1).astype("float32")
hess_value = mat_value + mat_value.T
hess_v_value = np.dot(hess_value, v_value)
for use_gpu in [False, True]:
with self.cached_session(use_gpu=use_gpu):
mat = constant_op.constant(mat_value)
v = constant_op.constant(v_value)
x = constant_op.constant(x_value)
mat_x = math_ops.matmul(mat, x, name="Ax")
x_mat_x = math_ops.matmul(array_ops.transpose(x), mat_x, name="xAx")
hess_v = gradients_impl._hessian_vector_product(x_mat_x, [x], [v])[0]
hess_v_actual = self.evaluate(hess_v)
self.assertAllClose(hess_v_value, hess_v_actual)
class HessianTest(test_util.TensorFlowTestCase):
@test_util.run_v1_only("b/120545219")
def testHessian1D(self):
# Manually compute the Hessian explicitly for a low-dimensional problem
# and check that `hessian` matches. Specifically, the Hessian of
# f(x) = x^T A x is H = A + A^T.
m = 4
rng = np.random.RandomState([1, 2, 3])
mat_value = rng.randn(m, m).astype("float32")
x_value = rng.randn(m).astype("float32")
hess_value = mat_value + mat_value.T
with self.session(use_gpu=True):
mat = constant_op.constant(mat_value)
x = constant_op.constant(x_value)
x_mat_x = math_ops.reduce_sum(x[:, None] * mat * x[None, :])
hess = gradients.hessians(x_mat_x, x)[0]
hess_actual = self.evaluate(hess)
self.assertAllClose(hess_value, hess_actual)
@test_util.run_v1_only("b/120545219")
def testHessian1D_multi(self):
# Test the computation of the hessian with respect to multiple tensors
m = 4
n = 3
rng = np.random.RandomState([1, 2, 3])
mat_values = [rng.randn(m, m).astype("float32") for _ in range(n)]
x_values = [rng.randn(m).astype("float32") for _ in range(n)]
hess_values = [mat_value + mat_value.T for mat_value in mat_values]
with self.session(use_gpu=True):
mats = [constant_op.constant(mat_value) for mat_value in mat_values]
xs = [constant_op.constant(x_value) for x_value in x_values]
xs_mats_xs = [
math_ops.reduce_sum(x[:, None] * mat * x[None, :])
for x, mat in zip(xs, mats)
]
hessians = gradients.hessians(xs_mats_xs, xs)
hessians_actual = [hess.eval() for hess in hessians]
for hess_value, hess_actual in zip(hess_values, hessians_actual):
self.assertAllClose(hess_value, hess_actual)
@test_util.run_v1_only("b/120545219")
def testHessianInvalidDimension(self):
for shape in [(10, 10), None]:
with self.cached_session(use_gpu=True):
x = array_ops.placeholder(dtypes.float32, shape)
# Expect a ValueError because the dimensions are wrong
with self.assertRaises(ValueError):
gradients.hessians(x, x)
@test_util.run_v1_only("b/120545219")
def testHessian2D_square_matrix(self):
# Manually compute the Hessian explicitly for a low-dimensional problem
# and check that `hessian` matches. Specifically, the Hessian of
# f(x) = 1/2 * x^T * x is H = constant (block identity matrix)
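# (Here d^2 f / dx_ab dx_cd depends only on whether a == c, so the flattened
# Hessian consists of m x m blocks of ones along the block diagonal.)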
m = 3
rng = np.random.RandomState([1, 2, 3])
x_value = rng.randn(m, m).astype("float32")
with self.session(use_gpu=True):
x = constant_op.constant(x_value)
x_square = math_ops.reduce_sum(
math_ops.matmul(array_ops.transpose(x), x) * 0.5
)
hess = gradients.hessians(x_square, x)[0]
hess_actual = self.evaluate(hess)
hess_value = np.bmat([
[elem*np.ones((m, m)) for elem in vec]
for vec in np.eye(m)
]).astype("float32")
self.assertAllEqual((m, m, m, m), hess_actual.shape)
self.assertAllClose(hess_value, hess_actual.reshape((m * m, m * m)))
@test_util.run_v1_only("b/120545219")
def testHessian2D_non_square_matrix(self):
m = 3
n = 4
rng = np.random.RandomState([1, 2, 3])
x_value = rng.randn(m, n).astype("float32")
with self.session(use_gpu=True):
x = constant_op.constant(x_value)
x_square = math_ops.reduce_sum(
math_ops.matmul(array_ops.transpose(x), x) * 0.5
)
hess = gradients.hessians(x_square, x)[0]
hess_actual = self.evaluate(hess)
hess_value = np.bmat([
[elem*np.ones((n, n)) for elem in vec]
for vec in np.eye(m)
]).astype("float32")
self.assertAllEqual((m, n, m, n), hess_actual.shape)
self.assertAllClose(hess_value, hess_actual.reshape((m * n, m * n)))
class IndexedSlicesToTensorTest(test_util.TensorFlowTestCase):
@test_util.run_v1_only("b/120545219")
def testIndexedSlicesToTensor(self):
with self.cached_session():
np_val = np.random.rand(4, 4, 4, 4).astype(np.float32)
c = constant_op.constant(np_val)
c_sparse = math_ops._as_indexed_slices(c)
self.assertAllEqual(np_val.shape, c_sparse.dense_shape.eval())
c_dense = math_ops.multiply(c_sparse, 1.0)
self.assertAllClose(np_val, self.evaluate(c_dense))
@test_util.run_v1_only("b/120545219")
def testIndexedSlicesToTensorList(self):
with self.cached_session():
numpy_list = []
dense_list = []
sparse_list = []
for _ in range(3):
np_val = np.random.rand(4, 4, 4, 4).astype(np.float32)
c = constant_op.constant(np_val)
c_sparse = math_ops._as_indexed_slices(c)
numpy_list.append(np_val)
dense_list.append(c)
sparse_list.append(c_sparse)
packed_dense = array_ops.stack(dense_list)
packed_sparse = array_ops.stack(sparse_list)
self.assertAllClose(packed_dense.eval(), self.evaluate(packed_sparse))
@test_util.run_v1_only("b/120545219")
def testInt64Indices(self):
with self.cached_session():
np_val = np.random.rand(4, 4, 4, 4).astype(np.float32)
c = constant_op.constant(np_val)
c_sparse = math_ops._as_indexed_slices(c)
c_sparse = ops.IndexedSlices(
c_sparse.values,
math_ops.cast(c_sparse.indices, dtypes.int64), c_sparse.dense_shape)
self.assertAllEqual(np_val.shape, c_sparse.dense_shape.eval())
c_dense = math_ops.multiply(c_sparse, 1.0)
self.assertAllClose(np_val, self.evaluate(c_dense))
@test_util.run_v1_only("b/120545219")
def testWarnings(self):
# TODO(gunan) Reenable after this issue is fixed:
# https://github.com/google/protobuf/issues/2812
if sys.version_info >= (3, 5):
self.skipTest("Skipped test for Python 3.5+")
# Smaller than the threshold: no warning.
c_sparse = ops.IndexedSlices(
array_ops.placeholder(dtypes.float32),
array_ops.placeholder(dtypes.int32), constant([4, 4, 4, 4]))
with warnings.catch_warnings(record=True) as w:
math_ops.multiply(c_sparse, 1.0)
self.assertEqual(0, len(w))
# Greater than or equal to the threshold: warning.
c_sparse = ops.IndexedSlices(
array_ops.placeholder(dtypes.float32),
array_ops.placeholder(dtypes.int32), constant([100, 100, 100, 100]))
# "always" filter prevents the warning from being suppressed if it was
# already triggered in a different test.
warnings.simplefilter("always")
with warnings.catch_warnings(record=True) as w:
math_ops.multiply(c_sparse, 1.0)
self.assertEqual(1, len(w))
self.assertTrue(
"with 100000000 elements. This may consume a large amount of memory." in
str(w[0].message))
# Unknown dense shape: warning.
c_sparse = ops.IndexedSlices(
array_ops.placeholder(dtypes.float32),
array_ops.placeholder(dtypes.int32),
array_ops.placeholder(dtypes.int32))
with warnings.catch_warnings(record=True) as w:
math_ops.multiply(c_sparse, 1.0)
self.assertEqual(1, len(w))
self.assertTrue(
"of unknown shape. This may consume a large amount of memory." in
str(w[0].message))
class OnlyRealGradientsTest(test_util.TensorFlowTestCase):
@test_util.run_v1_only("b/120545219")
def testRealOnly(self):
x = constant_op.constant(7+3j, dtype=dtypes.complex64)
y = math_ops.square(x)
with self.assertRaisesRegexp(
TypeError,
r"Gradients of complex tensors must set grad_ys "
r"\(y\.dtype = tf\.complex64\)"):
gradients.gradients(y, x)
class ResourceCondTest(test_util.TensorFlowTestCase):
@test_util.run_v1_only("b/120545219")
def testBasic(self):
gamma = resource_variable_ops.ResourceVariable(
np.random.random((3,)),
dtype="float32", name="gamma")
inputs = array_ops.ones(shape=(3,), dtype="float32")
def TestFn():
output = inputs + gamma
return output
training = array_ops.placeholder_with_default(True, shape=())
output = control_flow_ops.cond(
training, TestFn, lambda: inputs)
loss = output
grads = gradients.gradients(
loss, [gamma])
self.assertTrue(None not in grads)
class CustomGradientTest(test_util.TensorFlowTestCase):
def testCustomGradientTrivial(self):
@custom_gradient.custom_gradient
def MyIdentity(x):
def Grad(dy):
return [3 * dy]
return x, Grad
with ops.Graph().as_default():
x = constant(3.)
y = MyIdentity(MyIdentity(x))
dy = gradients.gradients(y, x)[0]
with session.Session():
self.assertEqual(9., self.evaluate(dy))
def testCustomGradient(self):
@custom_gradient.custom_gradient
def MyMultiply(x1, x2):
result = x1 * x2
def Grad(dy):
# Switched the ordering here.
return [dy * x1, dy * x2]
return result, Grad
with ops.Graph().as_default():
x1 = constant(3.)
x2 = constant(5.)
y = MyMultiply(x1, x2)
dy = gradients.gradients(y, [x1, x2])
with session.Session() as sess:
self.assertAllEqual([3., 5.], self.evaluate(dy))
def testCustomGradientErrors(self):
@custom_gradient.custom_gradient
def F(x):
def Grad(_):
raise RuntimeError("x")
return x, Grad
with ops.Graph().as_default():
x = constant(1.0)
y = F(x)
with self.assertRaises(RuntimeError):
gradients.gradients(y, x)
def testCustomGradientWithVariables(self):
@custom_gradient.custom_gradient
def F(x):
out = core_layers.dense(x, 3, use_bias=False)
def Grad(out_grad, variables=None): # pylint: disable=redefined-outer-name
self.assertEqual(1, len(variables))
grads = gradients.gradients(out, [x, variables[0]], grad_ys=out_grad)
return grads[0], [array_ops.ones((4, 3))]
return out, Grad
with ops.Graph().as_default():
x = array_ops.ones((2, 4))
with variable_scope.variable_scope("f", use_resource=True) as vs:
y = F(x)
all_vars = vs.global_variables()
assert len(all_vars) == 1
grads = gradients.gradients(y, [x, all_vars[0]])
for g in grads:
self.assertTrue(g is not None)
with session.Session() as sess:
self.evaluate(variables.global_variables_initializer())
dw = sess.run(math_ops.reduce_sum(grads[1]))
self.assertEqual(12., dw)
def testCustomGradientWithVariablesEager(self):
with context.eager_mode():
layer = core_layers.Dense(4, use_bias=False)
@custom_gradient.custom_gradient
def F(x):
out = layer(x)
def Grad(out_grad, variables=None): # pylint: disable=redefined-outer-name
del out_grad
self.assertEqual(1, len(variables))
return (array_ops.ones((3, 2)),
[array_ops.ones((2, 4))])
return out, Grad
x = array_ops.ones((3, 2)) + 2.
with backprop.GradientTape() as tape:
tape.watch(x)
y = F(x)
w, = layer.variables
dx, dw = tape.gradient(y, [x, w])
self.assertEqual(6., math_ops.reduce_sum(dx).numpy())
self.assertEqual(8., math_ops.reduce_sum(dw).numpy())
@test_util.run_v1_only("b/120545219")
def testCustomGradientErrorsWithNonResourceVariables(self):
def F(x, use_resource=False):
with variable_scope.variable_scope("f", use_resource=use_resource):
out = core_layers.dense(x, 4, use_bias=False)
def Grad(out_grad, variables=None): # pylint: disable=redefined-outer-name
del out_grad
self.assertEqual(1, len(variables))
return (array_ops.ones((3, 2)), [array_ops.ones((2, 4))])
return out, Grad
@custom_gradient.custom_gradient
def FResource(x):
return F(x, use_resource=True)
@custom_gradient.custom_gradient
def FNonResource(x):
return F(x, use_resource=False)
x = array_ops.ones((3, 2)) + 2.
# Wrapping scope has use_resource=True but inner scope sets to False. Fails.
with variable_scope.variable_scope("vs1", use_resource=True):
with self.assertRaisesWithPredicateMatch(TypeError,
"must be `ResourceVariable`s"):
FNonResource(x)
# Wrapping scope has use_resource=False but inner scope sets to True.
# Passes.
with variable_scope.variable_scope("vs2", use_resource=False):
FResource(x)
def testWithNumpyInputs(self):
with context.eager_mode():
@custom_gradient.custom_gradient
def F(x):
out = x
def Grad(_):
return (None, None)
return out, Grad
x = np.ones((3, 2), dtype=np.float32)
# Smoke test to ensure numpy inputs are accepted
F(x)
@test_util.run_v1_only("b/120545219")
def testRVGradientsDynamicCond(self):
with self.cached_session():
alpha = resource_variable_ops.ResourceVariable(
np.random.random((1,)),
dtype="float32")
conditional = array_ops.placeholder_with_default(True, shape=())
output = control_flow_ops.cond(
conditional, lambda: alpha * 2, lambda: alpha * 3)
g, = gradients_impl.gradients(output, alpha)
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(g.eval(), [2.0])
self.assertAllEqual(g.eval(feed_dict={conditional: False}), [3.0])
class AggregateIndexedSlicesGradientsTest(test_util.TensorFlowTestCase):
def _assert_indexed_slices_equal(self, left, right):
self.assertAllEqual(
self.evaluate(ops.convert_to_tensor(left)),
self.evaluate(ops.convert_to_tensor(right)))
def testNoGradients(self):
self.assertIsNone(gradients_impl._AggregateIndexedSlicesGradients([]))
def testOneGradient(self):
t = math_ops._as_indexed_slices(constant_op.constant(
[[1., 2.], [0, 0], [3., 4.]]))
result = gradients_impl._AggregateIndexedSlicesGradients([t])
self._assert_indexed_slices_equal(t, result)
def testMultipleGradients(self):
t0 = math_ops._as_indexed_slices(constant_op.constant(
[[1., 2.], [0, 0], [3., 4.]]))
t1 = math_ops._as_indexed_slices(constant_op.constant(
[[0., 0.], [5, 6], [7., 8.]]))
total = constant_op.constant(
[[1., 2.], [5, 6], [10., 12.]])
result = gradients_impl._AggregateIndexedSlicesGradients([t0, t1])
self._assert_indexed_slices_equal(total, result)
def testMultipleGradientsWithNones(self):
t0 = math_ops._as_indexed_slices(constant_op.constant(
[[1., 2.], [0, 0], [3., 4.]]))
t1 = math_ops._as_indexed_slices(constant_op.constant(
[[0., 0.], [5, 6], [7., 8.]]))
t3 = None
total = constant_op.constant(
[[1., 2.], [5, 6], [10., 12.]])
result = gradients_impl._AggregateIndexedSlicesGradients([t0, t1, t3])
self._assert_indexed_slices_equal(total, result)
def testMixedTensorAndIndexedSlices(self):
t0 = math_ops._as_indexed_slices(constant_op.constant(
[[1., 2.], [0, 0], [3., 4.]]))
t1 = constant_op.constant(
[[0., 0.], [5, 6], [7., 8.]])
total = constant_op.constant(
[[1., 2.], [5, 6], [10., 12.]])
result = gradients_impl._AggregateIndexedSlicesGradients([t0, t1])
self._assert_indexed_slices_equal(total, result)
class TensorListGradientsTest(test_util.TensorFlowTestCase):
def testDefaultGradYs(self):
with ops.Graph().as_default():
tl = list_ops.empty_tensor_list(
element_dtype=dtypes.float32,
element_shape=ops.convert_to_tensor([], dtype=dtypes.int32))
a = constant(1.0)
tl = list_ops.tensor_list_push_back(tl, a)
grad_tl = list_ops.empty_tensor_list(
element_dtype=dtypes.float32,
element_shape=ops.convert_to_tensor([], dtype=dtypes.int32))
grad_tl = list_ops.tensor_list_push_back(tl, constant(5.0))
grad = gradients.gradients(tl, a, grad_ys=grad_tl)[0]
with self.cached_session() as sess:
self.assertEquals(self.evaluate(grad), 5.)
if __name__ == "__main__":
googletest.main()
| {
"content_hash": "a3f0bb5f15b03589d52ab11a65891255",
"timestamp": "",
"source": "github",
"line_count": 1090,
"max_line_length": 84,
"avg_line_length": 36.35963302752294,
"alnum_prop": 0.6294660880096892,
"repo_name": "Bismarrck/tensorflow",
"id": "c53afef63bc1d2fc1ba1927c687f7ecad4eb46a4",
"size": "40321",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tensorflow/python/ops/gradients_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "4882"
},
{
"name": "Batchfile",
"bytes": "10132"
},
{
"name": "C",
"bytes": "493885"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "53117668"
},
{
"name": "CMake",
"bytes": "207176"
},
{
"name": "Dockerfile",
"bytes": "39024"
},
{
"name": "Go",
"bytes": "1303624"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "893928"
},
{
"name": "Jupyter Notebook",
"bytes": "2657814"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "68402"
},
{
"name": "Objective-C",
"bytes": "16140"
},
{
"name": "Objective-C++",
"bytes": "102511"
},
{
"name": "PHP",
"bytes": "5172"
},
{
"name": "Pascal",
"bytes": "221"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "43480067"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "838"
},
{
"name": "Shell",
"bytes": "497472"
},
{
"name": "Smarty",
"bytes": "6976"
}
],
"symlink_target": ""
} |
import os
from rbuild.productstore import dirstore
from testutils import mock
from rbuild_test import rbuildhelp, mockproddef
class EditTest(rbuildhelp.RbuildHelper):
def newProductDefinition(self):
self.cfg.initializeFlavors()
return mockproddef.getProductDefinition(self.cfg)
def testCommand(self):
handle = self.getRbuildHandle()
handle.Edit.registerCommands()
handle.Edit.initialize()
cmd = handle.Commands.getCommandClass('edit')()
mock.mockMethod(handle.Edit.editProductDefinition)
cmd.runCommand(handle, {}, ['rbuild', 'edit', 'product'])
handle.Edit.editProductDefinition._mock.assertCalled('rbuild commit')
cmd.runCommand(handle, dict(message='blip'),
['rbuild', 'edit', 'product'])
handle.Edit.editProductDefinition._mock.assertCalled('blip')
def testEditProductDefinition(self):
proddef = self.newProductDefinition()
projDir = os.path.join(self.workDir, 'myproject')
prodDefDir = os.path.join(projDir, '.rbuild/product-definition')
prodDefPath = os.path.join(prodDefDir, 'product-definition.xml')
os.makedirs(prodDefDir)
proddef.serialize(file(prodDefPath, "w"))
productStore = dirstore.CheckoutProductStore(baseDirectory=projDir)
handle = self.getRbuildHandle(productStore=productStore)
mock.mock(handle.facade, 'conary')
facade = handle.facade.conary
message = "commit message"
# Return a consistent temp file
tmpf = handle.Edit._makeTemporaryFile()
mock.mockMethod(handle.Edit._makeTemporaryFile, tmpf)
class MockMethod(object):
def __init__(self):
self.retval = None
self.callList = []
self.realFunction = None
self._idx = 0
def __call__(self, *args, **kwargs):
self.callList.append((args, kwargs))
if self.realFunction is None:
return self.retval
if isinstance(self.realFunction, list):
func = self.realFunction[self._idx]
self._idx += 1
else:
func = self.realFunction
return func(*args, **kwargs)
def reset(self):
self._idx = 0
del self.callList[:]
invEditor = MockMethod()
self.mock(handle.Edit, '_invokeEditor', invEditor)
# Simulate edit error
invEditor.retval = 1
self.assertEquals(handle.Edit.editProductDefinition(message), 1)
tmpf.seek(0); tmpf.truncate(); invEditor.reset()
# Simulate no change (mtime of file doesn't change)
invEditor.retval = 0
self.assertEquals(handle.Edit.editProductDefinition(message), 0)
tmpf.seek(0); tmpf.truncate(); invEditor.reset()
def _changedProdDef(stream):
# Change the proddef
prod = self.newProductDefinition()
prod.setProductName('awesome name, changed')
stream.seek(0); stream.truncate()
prod.serialize(stream)
return 0
invEditor.realFunction = _changedProdDef
self.assertEquals(handle.Edit.editProductDefinition(message), 0)
self.assertEquals(len(invEditor.callList), 1)
facade.commit._mock.assertCalled(prodDefDir, message=message)
# Test some of the more possible errors
tmpf.seek(0); tmpf.truncate(); invEditor.reset()
def _invalidXml(stream):
stream.seek(0); stream.truncate()
stream.write("<invalid xml")
stream.flush()
return 0
invEditor.realFunction = _invalidXml
uiInput = mock.mockMethod(handle.ui.input)
uiInput._mock.setReturn('n', 'Do you want to retry? (Y/n) ')
self.assertEquals(handle.Edit.editProductDefinition(message), 3)
# Invalid xml first, then correct one
tmpf.seek(0); tmpf.truncate(); invEditor.reset()
invEditor.realFunction = [ _invalidXml, _invalidXml, _changedProdDef ]
uiInput._mock.setReturn('y', 'Do you want to retry? (Y/n) ')
self.assertEquals(handle.Edit.editProductDefinition(message), 0)
facade.commit._mock.assertCalled(prodDefDir, message=message)
def _xmlNoNamespace(stream):
stream.seek(0); stream.truncate()
stream.write("<productDefinition/>")
stream.flush()
return 0
tmpf.seek(0); tmpf.truncate(); invEditor.reset()
invEditor.realFunction = _xmlNoNamespace
uiInput._mock.setReturn('n', 'Do you want to retry? (Y/n) ')
self.assertEquals(handle.Edit.editProductDefinition(message), 1)
def _xmlNamespacedNoVersion(stream):
stream.seek(0); stream.truncate()
stream.write("<productDefinition xmlns='http://dummy'/>")
stream.flush()
return 0
tmpf.seek(0); tmpf.truncate(); invEditor.reset()
invEditor.realFunction = _xmlNamespacedNoVersion
uiInput._mock.setReturn('n', 'Do you want to retry? (Y/n) ')
self.assertEquals(handle.Edit.editProductDefinition(message), 2)
# XXX should test an invalid proddef too
| {
"content_hash": "93b058aadeeffae99a9e6a841a1cbed2",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 78,
"avg_line_length": 38.613138686131386,
"alnum_prop": 0.6179584120982987,
"repo_name": "fedora-conary/rbuild",
"id": "4d83152afbc004ca1f2b1bb98c70b0903d0f72a5",
"size": "5895",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rbuild_test/unit_test/pluginstest/edittest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "686899"
},
{
"name": "Shell",
"bytes": "3446"
}
],
"symlink_target": ""
} |
import re
from pallium.condition import GangliaBooleanTree
from pallium.config import load_json_config, _STR_RE_VALID_TAG, valid_tag
from pallium.util import SuperDict, files_from_dir
DEFAULT_ALERT = {
"name": None,
"description": None,
"options": {
"threshold": 1,
"priority": "low",
"grouped": False
},
"filter": {
"grid": ".*",
"cluster": ".*",
"host": ".*",
},
"rule": [],
}
DEFAULT_METALERT = {
"name": None,
"description": None,
"options": {
"threshold": 1,
"priority": "low",
},
"rule": [],
}
class InvalidAlert(AttributeError): pass
class BaseAlert(SuperDict):
required_settings = []
default = {}
def __init__(self, data):
SuperDict.__init__(self, self.default)
self._load_config(data)
def _load_config(self, data):
data = self.load_config(data)
self.recursive_update(data)
self._validate_alert()
self._convert_data()
def _validate_alert(self):
for key in self.required_settings:
if not self.get(key, None):
raise InvalidAlert("key '%s' is not set in alert '%s'" % \
(key, self))
name = self.get('name')
# must have a valid tag name
if not valid_tag(name):
raise ValueError("invalid alert name '%s', must match regex '%s'" % \
(name, _STR_RE_VALID_TAG))
self._validate_rule(self['rule'])
def _validate_rule(self, rule):
if not GangliaBooleanTree.is_boolean_tree(rule):
raise InvalidAlert(
"the alert rule in '%s' must be a boolean tree" % self
)
def _convert_data(self):
pass
def load_config(self, data):
raise NotImplementedError
class Alert(BaseAlert):
required_settings = [ "name", "description" ]
default = DEFAULT_ALERT
def _convert_data(self):
for key in [ "grid", "cluster", "host" ]:
self["filter"][key] = re.compile(self["filter"][key])
def load_config(self, data):
raise NotImplementedError
class AlertJsonLoader(object):
def load_config(self, data):
return load_json_config(data)
class AlertDictLoader(object):
def load_config(self, data):
if not isinstance(data, dict):
raise InvalidAlert("invalid data type for alert '%s'" % data)
return data
class JsonAlert(AlertJsonLoader, Alert): pass
class DictAlert(AlertDictLoader, Alert): pass
class Metalert(BaseAlert):
required_settings = [ "name", "description" ]
default = DEFAULT_METALERT
class JsonMetalert(AlertJsonLoader, Metalert): pass
class DictMetalert(AlertDictLoader, Metalert): pass
def load_alerts(directory, alert_cls):
"""
Loads alerts into a dictionary datastore of the form::
{
"alert_name1": {
... alert structure ...
},
"alert_name2": {
... alert structure ...
},
...
}
The alert 'name' is extracted from the alert structure and used as
the key. Alert names must be unique
"""
files = files_from_dir(directory)
store = {}
for file in files:
alert = dict(alert_cls(file))
name = alert['name']
if name in store:
raise Exception("duplicate alert name: '%s'" % name)
store[name] = alert
del store[name]['name']
return store
| {
"content_hash": "68bfbc2e49a8fc65a19bc0113fb80459",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 81,
"avg_line_length": 25.597014925373134,
"alnum_prop": 0.580466472303207,
"repo_name": "jcmcken/pallium",
"id": "63fc5466522d9de4c6944dc86682941f18ecbbca",
"size": "3431",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pallium/alerts.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "19376"
},
{
"name": "Shell",
"bytes": "92"
}
],
"symlink_target": ""
} |
import pyPdf
def getPDFContent(path):
content = ""
# Load PDF into pyPDF
pdf = pyPdf.PdfFileReader(file(path, "rb"))
# Iterate pages
for i in range(0, pdf.getNumPages()):
# Extract text from page and add to content
content += pdf.getPage(i).extractText() + "\n"
# Collapse whitespace
content = " ".join(content.replace("\xa0", " ").strip().split())
return content
print getPDFContent("CONSTITUCION-Interiores.pdf").encode("ascii", "ignore")
| {
"content_hash": "42b241f03089f3cec785d4420e2a02a0",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 76,
"avg_line_length": 32.733333333333334,
"alnum_prop": 0.6435845213849287,
"repo_name": "lamahechag/CC-QuantitativeAnalysis",
"id": "48a555ae66d77a0485327ebde108d1e6681a5a47",
"size": "491",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PdfContent.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "853"
}
],
"symlink_target": ""
} |
"""This module provides a function for reading dxf files and parsing them into a useful tree of objects and data.
The convert function is called by the readDXF function to convert dxf strings into the correct data based
on their type code. readDXF expects a (full path) file name as input.
"""
# --------------------------------------------------------------------------
# DXF Reader v0.9 by Ed Blake (AKA Kitsu)
# 2008.05.08 modif.def convert() by Remigiusz Fiedler (AKA migius)
# --------------------------------------------------------------------------
# ***** BEGIN GPL LICENSE BLOCK *****
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# ***** END GPL LICENCE BLOCK *****
# --------------------------------------------------------------------------
from dxfImportObjects import *
class Object:
"""Empty container class for dxf objects"""
def __init__(self, _type='', block=False):
"""_type expects a string value."""
self.type = _type
self.name = ''
self.data = []
def __str__(self):
if self.name:
return self.name
else:
return self.type
def __repr__(self):
return str(self.data)
def get_type(self, kind=''):
"""Despite the name, this method actually returns all objects of type 'kind' from self.data."""
if kind:
objects = []
for item in self.data:
if type(item) != list and item.type == kind:
# we want this type of object
objects.append(item)
elif type(item) == list and item[0] == kind:
# we want this type of data
objects.append(item[1])
return objects
class InitializationError(Exception): pass
class StateMachine:
"""(finite) State Machine from the great David Mertz's great Charming Python article."""
def __init__(self):
self.handlers = []
self.startState = None
self.endStates = []
def add_state(self, handler, end_state=0):
"""All states and handlers are functions which return
a state and a cargo."""
self.handlers.append(handler)
if end_state:
self.endStates.append(handler)
def set_start(self, handler):
"""Sets the starting handler function."""
self.startState = handler
def run(self, cargo=None):
if not self.startState:
raise InitializationError,\
"must call .set_start() before .run()"
if not self.endStates:
raise InitializationError, \
"at least one state must be an end_state"
handler = self.startState
while 1:
(newState, cargo) = handler(cargo)
#print cargo
if newState in self.endStates:
return newState(cargo)
#break
elif newState not in self.handlers:
raise RuntimeError, "Invalid target %s" % newState
else:
handler = newState
def get_name(data):
"""Get the name of an object from its object data.
Returns a pair of (data_item, name) where data_item is the list entry where the name was found
(the data_item can be used to remove the entry from the object data). Be sure to check
that name is not None before using the returned values!
"""
value = None
for item in data:
if item[0] == 2:
value = item[1]
break
return item, value
def get_layer(data):
"""Expects object data as input.
Returns (entry, layer_name) where entry is the data item that provided the layer name.
"""
value = None
for item in data:
if item[0] == 8:
value = item[1]
break
return item, value
def convert(code, value):
"""Convert a string to the correct Python type based on its dxf code.
code types:
ints = 60-79, 170-179, 270-289, 370-389, 400-409, 1060-1070
longs = 90-99, 420-429, 440-459, 1071
floats = 10-39, 40-59, 110-139, 140-149, 210-239, 460-469, 1010-1059
hex = 105, 310-379, 390-399
strings = 0-9, 100, 102, 300-309, 410-419, 430-439, 470-479, 999, 1000-1009
"""
if 59 < code < 80 or 169 < code < 180 or 269 < code < 290 or 369 < code < 390 or 399 < code < 410 or 1059 < code < 1071:
value = int(float(value))
elif 89 < code < 100 or 419 < code < 430 or 439 < code < 460 or code == 1071:
value = long(float(value))
elif 9 < code < 60 or 109 < code < 150 or 209 < code < 240 or 459 < code < 470 or 1009 < code < 1060:
value = float(value)
elif code == 105 or 309 < code < 380 or 389 < code < 400:
value = int(value, 16) # should be left as string?
else: # it's already a string so do nothing
pass
return value
def findObject(infile, kind=''):
"""Finds the next occurance of an object."""
obj = False
while 1:
line = infile.readline()
if not line: # readline returns '' at eof
return False
if not obj: # We're still looking for our object code
if line.lower().strip() == '0':
obj = True # found it
else: # we are in an object definition
if kind: # if we're looking for a particular kind
if line.lower().strip() == kind:
obj = Object(line.lower().strip())
break
else: # otherwise take anything non-numeric
if line.lower().strip() not in string.digits:
obj = Object(line.lower().strip())
break
obj = False # whether we found one or not it's time to start over
return obj
def handleObject(infile):
"""Add data to an object until end of object is found."""
line = infile.readline()
if line.lower().strip() == 'section':
return 'section' # this would be a problem
elif line.lower().strip() == 'endsec':
return 'endsec' # this means we are done with a section
else: # add data to the object until we find a new object
obj = Object(line.lower().strip())
obj.name = obj.type
done = False
data = []
while not done:
line = infile.readline()
if not data:
if line.lower().strip() == '0':
#we've found an object, time to return
return obj
else:
# first part is always an int
data.append(int(line.lower().strip()))
else:
data.append(convert(data[0], line.strip()))
obj.data.append(data)
data = []
def handleTable(table, infile):
"""Special handler for dealing with nested table objects."""
item, name = get_name(table.data)
if name: # We should always find a name
table.data.remove(item)
table.name = name.lower()
# This next bit is from handleObject
# handleObject should be generalized to work with any section like object
while 1:
obj = handleObject(infile)
if obj.type == 'table':
print "Warning: previous table not closed!"
return table
elif obj.type == 'endtab':
return table # this means we are done with the table
else: # add objects to the table until one of the above is found
table.data.append(obj)
def handleBlock(block, infile):
"""Special handler for dealing with nested table objects."""
item, name = get_name(block.data)
if name: # We should always find a name
block.data.remove(item)
block.name = name
# This next bit is from handleObject
# handleObject should be generalized to work with any section like object
while 1:
obj = handleObject(infile)
if obj.type == 'block':
print "Warning: previous block not closed!"
return block
elif obj.type == 'endblk':
return block # this means we are done with the table
else: # add objects to the table until one of the above is found
block.data.append(obj)
"""These are the states/functions used in the State Machine.
states:
start - find first section
start_section - add data, find first object
object - add obj-data, watch for next obj (called directly by start_section)
end_section - look for next section or eof
end - return results
"""
def start(cargo):
"""Expects the infile as cargo, initializes the cargo."""
#print "Entering start state!"
infile = cargo
drawing = Object('drawing')
section = findObject(infile, 'section')
if section:
return start_section, (infile, drawing, section)
else:
return error, (infile, "Failed to find any sections!")
def start_section(cargo):
"""Expects [infile, drawing, section] as cargo, builds a nested section object."""
#print "Entering start_section state!"
infile = cargo[0]
drawing = cargo[1]
section = cargo[2]
# read each line, if it is an object declaration go to object mode
# otherwise create a [index, data] pair and add it to the sections data.
done = False
data = []
while not done:
line = infile.readline()
if not data: # if we haven't found a dxf code yet
if line.lower().strip() == '0':
# we've found an object
while 1: # no way out unless we find an end section or a new section
obj = handleObject(infile)
if obj == 'section': # shouldn't happen
print "Warning: failed to close previous section!"
return end_section, (infile, drawing)
elif obj == 'endsec': # This section is over, look for the next
drawing.data.append(section)
return end_section, (infile, drawing)
elif obj.type == 'table': # tables are collections of data
obj = handleTable(obj, infile) # we need to find all their contents
section.data.append(obj) # before moving on
elif obj.type == 'block': # the same is true of blocks
obj = handleBlock(obj, infile) # we need to find all their contents
section.data.append(obj) # before moving on
else: # found another sub-object
section.data.append(obj)
else:
data.append(int(line.lower().strip()))
else: # we have our code, now we just need to convert the data and add it to our list.
data.append(convert(data[0], line.strip()))
section.data.append(data)
data = []
def end_section(cargo):
"""Expects (infile, drawing) as cargo, searches for next section."""
#print "Entering end_section state!"
infile = cargo[0]
drawing = cargo[1]
section = findObject(infile, 'section')
if section:
return start_section, (infile, drawing, section)
else:
return end, (infile, drawing)
def end(cargo):
"""Expects (infile, drawing) as cargo, called when eof has been reached."""
#print "Entering end state!"
infile = cargo[0]
drawing = cargo[1]
#infile.close()
return drawing
def error(cargo):
"""Expects a (infile, string) as cargo, called when there is an error during processing."""
#print "Entering error state!"
infile = cargo[0]
err = cargo[1]
infile.close()
print "There has been an error:"
print err
return False
def readDXF(filename):
"""Given a file name try to read it as a dxf file.
Output is an object with the following structure
drawing
header
header data
classes
class data
tables
table data
blocks
block data
entities
entity data
objects
object data
where foo data is a list of sub-objects. True object data
is of the form [code, data].
"""
infile = open(filename)
sm = StateMachine()
sm.add_state(error, True)
sm.add_state(end, True)
sm.add_state(start_section)
sm.add_state(end_section)
sm.add_state(start)
sm.set_start(start)
try:
drawing = sm.run(infile)
if drawing:
drawing.name = filename
for obj in drawing.data:
item, name = get_name(obj.data)
if name:
obj.data.remove(item)
obj.name = name.lower()
setattr(drawing, name.lower(), obj)
# Call the objectify function to cast
# raw objects into the right types of object
obj.data = objectify(obj.data)
#print obj.name
finally:
infile.close()
return drawing
if __name__ == "__main__":
filename = r".\examples\block-test.dxf"
drawing = readDXF(filename)
for item in drawing.entities.data:
print item
| {
"content_hash": "6b488d00984b72e087db2d22ff56c41c",
"timestamp": "",
"source": "github",
"line_count": 381,
"max_line_length": 121,
"avg_line_length": 31.089238845144358,
"alnum_prop": 0.6683832840861123,
"repo_name": "JohnyEngine/CNC",
"id": "85cbc53b9ddd67a9459fe260e2b8198469211464",
"size": "11845",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "deprecated/heekspython/examples/dxfReader.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "30017"
},
{
"name": "Batchfile",
"bytes": "10126"
},
{
"name": "C",
"bytes": "209705"
},
{
"name": "C++",
"bytes": "7311456"
},
{
"name": "CMake",
"bytes": "92171"
},
{
"name": "Inno Setup",
"bytes": "29066"
},
{
"name": "Makefile",
"bytes": "16079"
},
{
"name": "Objective-C",
"bytes": "8124"
},
{
"name": "Python",
"bytes": "1182253"
},
{
"name": "Shell",
"bytes": "9694"
}
],
"symlink_target": ""
} |
"""Moduyle containing the Reload plugin.
This plugin is derived from the Cherrypy Autoreloader monitor
and is used to reload a modified module through the Python
Aboard autoloader.
"""
import os
from cherrypy.process.plugins import Autoreloader
class Reloader(Autoreloader):
"""Class containing the Cherrypy reloader plugin.
It inherits from the Cherrypy Autoreloader plugin to slightly
modify its behaviour. Instead of reloading a module, this
plugin ensures that the Python Aboard Autoloader is called
to do this job (if it can, of course).
"""
def run(self):
"""Reload the process if registered files have been modified."""
for filename in self.sysfiles() | self.files:
if filename:
if filename.endswith('.pyc'):
filename = filename[:-1]
oldtime = self.mtimes.get(filename, 0)
if oldtime is None:
# Module with no .py file. Skip it.
continue
try:
mtime = os.stat(filename).st_mtime
except OSError:
# Either a module with no .py file, or it's been deleted.
mtime = None
if filename not in self.mtimes:
# If a module has no .py file, this will be None.
self.mtimes[filename] = mtime
else:
if mtime is None or mtime > oldtime:
# The file has been deleted or modified.
self.bus.log("The module {} changed, try to reload "
"it.".format(filename))
path = os.path.relpath(filename)[:-3]
self.loader.reload_module(path)
if mtime:
self.mtimes[filename] = mtime
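# A minimal usage sketch; the autoloader wiring below is an assumption, since
# this class relies on an externally assigned `loader` attribute exposing a
# reload_module(path) method (used in run() above):
#
#     reloader = Reloader(cherrypy.engine)
#     reloader.loader = autoloader  # the Python Aboard autoloader instance (assumed)
#     reloader.subscribe()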
| {
"content_hash": "0c25bc24ab0858e9030cb352753acb68",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 77,
"avg_line_length": 36.075471698113205,
"alnum_prop": 0.5387029288702929,
"repo_name": "v-legoff/pa-poc2",
"id": "4389e237c87e8c63744b5301653c6f3315f89a48",
"size": "3455",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ext/aboard/server/plugins/reloader.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "5771"
},
{
"name": "Python",
"bytes": "306982"
}
],
"symlink_target": ""
} |
import base64
import datetime
import logging
import urllib
from functools import wraps
from io import BytesIO
from typing import Callable, Dict, Optional, Sequence, Set, Tuple, TypeVar, Union, cast, overload
import django_otp
import orjson
from circuitbreaker import CircuitBreakerError, circuit
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth import login as django_login
from django.contrib.auth.decorators import user_passes_test as django_user_passes_test
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.views import redirect_to_login
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect, QueryDict
from django.http.multipartparser import MultiPartParser
from django.shortcuts import resolve_url
from django.template.response import SimpleTemplateResponse, TemplateResponse
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _
from django.views.decorators.csrf import csrf_exempt
from django_otp import user_has_device
from two_factor.utils import default_device
from zerver.lib.cache import cache_with_key
from zerver.lib.exceptions import (
AccessDeniedError,
AnomalousWebhookPayload,
ErrorCode,
InvalidAPIKeyError,
InvalidAPIKeyFormatError,
InvalidJSONError,
JsonableError,
OrganizationAdministratorRequired,
OrganizationMemberRequired,
OrganizationOwnerRequired,
RateLimited,
RealmDeactivatedError,
RemoteServerDeactivatedError,
UnsupportedWebhookEventType,
UserDeactivatedError,
WebhookError,
)
from zerver.lib.queue import queue_json_publish
from zerver.lib.rate_limiter import RateLimitedIPAddr, RateLimitedUser
from zerver.lib.request import REQ, RequestNotes, has_request_variables
from zerver.lib.response import json_method_not_allowed, json_success, json_unauthorized
from zerver.lib.subdomains import get_subdomain, user_matches_subdomain
from zerver.lib.timestamp import datetime_to_timestamp, timestamp_to_datetime
from zerver.lib.types import ViewFuncT
from zerver.lib.utils import has_api_key_format, statsd
from zerver.models import Realm, UserProfile, get_client, get_user_profile_by_api_key
if settings.ZILENCER_ENABLED:
from zilencer.models import (
RateLimitedRemoteZulipServer,
RemoteZulipServer,
get_remote_server_by_uuid,
)
rate_limiter_logger = logging.getLogger("zerver.lib.rate_limiter")
webhook_logger = logging.getLogger("zulip.zerver.webhooks")
webhook_unsupported_events_logger = logging.getLogger("zulip.zerver.webhooks.unsupported")
webhook_anomalous_payloads_logger = logging.getLogger("zulip.zerver.webhooks.anomalous")
FuncT = TypeVar("FuncT", bound=Callable[..., object])
def cachify(method: FuncT) -> FuncT:
dct: Dict[Tuple[object, ...], object] = {}
def cache_wrapper(*args: object) -> object:
tup = tuple(args)
if tup in dct:
return dct[tup]
result = method(*args)
dct[tup] = result
return result
return cast(FuncT, cache_wrapper) # https://github.com/python/mypy/issues/1927
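# Illustrative use of cachify (the example function is hypothetical): memoize a
# pure function on its positional arguments.
#
#     @cachify
#     def fib(n: int) -> int:
#         return n if n < 2 else fib(n - 1) + fib(n - 2)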
def update_user_activity(
request: HttpRequest, user_profile: UserProfile, query: Optional[str]
) -> None:
# update_active_status also pushes to RabbitMQ, and it seems
# redundant to log that here as well.
if request.META["PATH_INFO"] == "/json/users/me/presence":
return
request_notes = RequestNotes.get_notes(request)
if query is not None:
pass
elif request_notes.query is not None:
query = request_notes.query
else:
query = request.META["PATH_INFO"]
assert request_notes.client is not None
event = {
"query": query,
"user_profile_id": user_profile.id,
"time": datetime_to_timestamp(timezone_now()),
"client_id": request_notes.client.id,
}
queue_json_publish("user_activity", event, lambda event: None)
# Based on django.views.decorators.http.require_http_methods
def require_post(func: ViewFuncT) -> ViewFuncT:
@wraps(func)
def wrapper(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
if request.method != "POST":
err_method = request.method
logging.warning(
"Method Not Allowed (%s): %s",
err_method,
request.path,
extra={"status_code": 405, "request": request},
)
if RequestNotes.get_notes(request).error_format == "JSON":
return json_method_not_allowed(["POST"])
else:
return TemplateResponse(
request, "404.html", context={"status_code": 405}, status=405
)
return func(request, *args, **kwargs)
return cast(ViewFuncT, wrapper) # https://github.com/python/mypy/issues/1927
def require_realm_owner(func: ViewFuncT) -> ViewFuncT:
@wraps(func)
def wrapper(
request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
) -> HttpResponse:
if not user_profile.is_realm_owner:
raise OrganizationOwnerRequired()
return func(request, user_profile, *args, **kwargs)
return cast(ViewFuncT, wrapper) # https://github.com/python/mypy/issues/1927
def require_realm_admin(func: ViewFuncT) -> ViewFuncT:
@wraps(func)
def wrapper(
request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
) -> HttpResponse:
if not user_profile.is_realm_admin:
raise OrganizationAdministratorRequired()
return func(request, user_profile, *args, **kwargs)
return cast(ViewFuncT, wrapper) # https://github.com/python/mypy/issues/1927
def require_organization_member(func: ViewFuncT) -> ViewFuncT:
@wraps(func)
def wrapper(
request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
) -> HttpResponse:
if user_profile.role > UserProfile.ROLE_MEMBER:
raise OrganizationMemberRequired()
return func(request, user_profile, *args, **kwargs)
return cast(ViewFuncT, wrapper) # https://github.com/python/mypy/issues/1927
def require_billing_access(func: ViewFuncT) -> ViewFuncT:
@wraps(func)
def wrapper(
request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
) -> HttpResponse:
if not user_profile.has_billing_access:
raise JsonableError(_("Must be a billing administrator or an organization owner"))
return func(request, user_profile, *args, **kwargs)
return cast(ViewFuncT, wrapper) # https://github.com/python/mypy/issues/1927
def process_client(
request: HttpRequest,
user: Union[UserProfile, AnonymousUser],
*,
is_browser_view: bool = False,
client_name: Optional[str] = None,
skip_update_user_activity: bool = False,
query: Optional[str] = None,
) -> None:
request_notes = RequestNotes.get_notes(request)
if client_name is None:
client_name = request_notes.client_name
assert client_name is not None
# We could check for a browser's name being "Mozilla", but
# e.g. Opera and MobileSafari don't set that, and it seems
# more robust to just key off whether it was a browser view
if is_browser_view and not client_name.startswith("Zulip"):
# Avoid changing the client string for browsers, but let
# the Zulip desktop apps be themselves.
client_name = "website"
request_notes.client = get_client(client_name)
if not skip_update_user_activity and user.is_authenticated:
update_user_activity(request, user, query)
class InvalidZulipServerError(JsonableError):
code = ErrorCode.INVALID_ZULIP_SERVER
data_fields = ["role"]
def __init__(self, role: str) -> None:
self.role: str = role
@staticmethod
def msg_format() -> str:
return "Zulip server auth failure: {role} is not registered -- did you run `manage.py register_server`?"
class InvalidZulipServerKeyError(InvalidZulipServerError):
@staticmethod
def msg_format() -> str:
return "Zulip server auth failure: key does not match role {role}"
def validate_api_key(
request: HttpRequest,
role: Optional[str],
api_key: str,
allow_webhook_access: bool = False,
client_name: Optional[str] = None,
) -> Union[UserProfile, "RemoteZulipServer"]:
# Remove whitespace to protect users from trivial errors.
api_key = api_key.strip()
if role is not None:
role = role.strip()
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and "@" not in role:
try:
remote_server = get_remote_server_by_uuid(role)
except RemoteZulipServer.DoesNotExist:
raise InvalidZulipServerError(role)
if api_key != remote_server.api_key:
raise InvalidZulipServerKeyError(role)
if remote_server.deactivated:
raise RemoteServerDeactivatedError()
if get_subdomain(request) != Realm.SUBDOMAIN_FOR_ROOT_DOMAIN:
raise JsonableError(_("Invalid subdomain for push notifications bouncer"))
request.user = remote_server
remote_server.rate_limits = ""
# Skip updating UserActivity, since remote_server isn't actually a UserProfile object.
process_client(request, remote_server, skip_update_user_activity=True)
return remote_server
user_profile = access_user_by_api_key(request, api_key, email=role)
if user_profile.is_incoming_webhook and not allow_webhook_access:
raise JsonableError(_("This API is not available to incoming webhook bots."))
request.user = user_profile
process_client(request, user_profile, client_name=client_name)
return user_profile
def validate_account_and_subdomain(request: HttpRequest, user_profile: UserProfile) -> None:
if user_profile.realm.deactivated:
raise RealmDeactivatedError()
if not user_profile.is_active:
raise UserDeactivatedError()
# Either the subdomain matches, or we're accessing Tornado from
# and to localhost (aka spoofing a request as the user).
if not user_matches_subdomain(get_subdomain(request), user_profile) and not (
settings.RUNNING_INSIDE_TORNADO
and request.META["SERVER_NAME"] == "127.0.0.1"
and request.META["REMOTE_ADDR"] == "127.0.0.1"
):
logging.warning(
"User %s (%s) attempted to access API on wrong subdomain (%s)",
user_profile.delivery_email,
user_profile.realm.subdomain,
get_subdomain(request),
)
raise JsonableError(_("Account is not associated with this subdomain"))
def access_user_by_api_key(
request: HttpRequest, api_key: str, email: Optional[str] = None
) -> UserProfile:
if not has_api_key_format(api_key):
raise InvalidAPIKeyFormatError()
try:
user_profile = get_user_profile_by_api_key(api_key)
except UserProfile.DoesNotExist:
raise InvalidAPIKeyError()
if email is not None and email.lower() != user_profile.delivery_email.lower():
# This covers the case that the API key is correct, but for a
# different user. We may end up wanting to relax this
# constraint or give a different error message in the future.
raise InvalidAPIKeyError()
validate_account_and_subdomain(request, user_profile)
return user_profile
def log_unsupported_webhook_event(summary: str) -> None:
# This helper is primarily used by some of our more complicated
# webhook integrations (e.g. GitHub) that need to log an unsupported
# event based on attributes nested deep within a complicated JSON
# payload. In such cases, the error message we want to log may not
# really fit what a regular UnsupportedWebhookEventType exception
# represents.
webhook_unsupported_events_logger.exception(summary, stack_info=True)
def log_exception_to_webhook_logger(err: Exception) -> None:
if isinstance(err, AnomalousWebhookPayload):
webhook_anomalous_payloads_logger.exception(str(err), stack_info=True)
elif isinstance(err, UnsupportedWebhookEventType):
webhook_unsupported_events_logger.exception(str(err), stack_info=True)
else:
webhook_logger.exception(str(err), stack_info=True)
def full_webhook_client_name(raw_client_name: Optional[str] = None) -> Optional[str]:
if raw_client_name is None:
return None
return f"Zulip{raw_client_name}Webhook"
# Use this for webhook views that don't get an email passed in.
def webhook_view(
webhook_client_name: str,
notify_bot_owner_on_invalid_json: bool = True,
all_event_types: Optional[Sequence[str]] = None,
) -> Callable[[Callable[..., HttpResponse]], Callable[..., HttpResponse]]:
# Unfortunately, callback protocols are insufficient for this:
# https://mypy.readthedocs.io/en/stable/protocols.html#callback-protocols
# Variadic generics are necessary: https://github.com/python/typing/issues/193
def _wrapped_view_func(view_func: Callable[..., HttpResponse]) -> Callable[..., HttpResponse]:
@csrf_exempt
@has_request_variables
@wraps(view_func)
def _wrapped_func_arguments(
request: HttpRequest, api_key: str = REQ(), *args: object, **kwargs: object
) -> HttpResponse:
user_profile = validate_api_key(
request,
None,
api_key,
allow_webhook_access=True,
client_name=full_webhook_client_name(webhook_client_name),
)
if settings.RATE_LIMITING:
rate_limit_user(request, user_profile, domain="api_by_user")
try:
return view_func(request, user_profile, *args, **kwargs)
except Exception as err:
if isinstance(err, InvalidJSONError) and notify_bot_owner_on_invalid_json:
# NOTE: importing this at the top of file leads to a
# cyclic import; correct fix is probably to move
# notify_bot_owner_about_invalid_json to a smaller file.
from zerver.lib.webhooks.common import notify_bot_owner_about_invalid_json
notify_bot_owner_about_invalid_json(user_profile, webhook_client_name)
elif isinstance(err, JsonableError) and not isinstance(err, WebhookError):
pass
else:
if isinstance(err, WebhookError):
err.webhook_name = webhook_client_name
log_exception_to_webhook_logger(err)
raise err
_wrapped_func_arguments._all_event_types = all_event_types
return _wrapped_func_arguments
return _wrapped_view_func
def zulip_redirect_to_login(
request: HttpRequest,
login_url: Optional[str] = None,
redirect_field_name: str = REDIRECT_FIELD_NAME,
) -> HttpResponseRedirect:
path = request.build_absolute_uri()
resolved_login_url = resolve_url(login_url or settings.LOGIN_URL)
# If the login URL is the same scheme and net location then just
# use the path as the "next" url.
login_scheme, login_netloc = urllib.parse.urlparse(resolved_login_url)[:2]
current_scheme, current_netloc = urllib.parse.urlparse(path)[:2]
if (not login_scheme or login_scheme == current_scheme) and (
not login_netloc or login_netloc == current_netloc
):
path = request.get_full_path()
if path == "/":
# Don't add ?next=/, to keep our URLs clean
return HttpResponseRedirect(resolved_login_url)
return redirect_to_login(path, resolved_login_url, redirect_field_name)
# From Django 2.2, modified to pass the request rather than just the
# user into test_func; this is useful so that we can revalidate the
# subdomain matches the user's realm. It is likely that we could make
# the subdomain validation happen elsewhere and switch to using the
# stock Django version.
def user_passes_test(
test_func: Callable[[HttpRequest], bool],
login_url: Optional[str] = None,
redirect_field_name: str = REDIRECT_FIELD_NAME,
) -> Callable[[ViewFuncT], ViewFuncT]:
"""
Decorator for views that checks that the user passes the given test,
redirecting to the log-in page if necessary. The test should be a callable
that takes the user object and returns True if the user passes.
"""
def decorator(view_func: ViewFuncT) -> ViewFuncT:
@wraps(view_func)
def _wrapped_view(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
if test_func(request):
return view_func(request, *args, **kwargs)
return zulip_redirect_to_login(request, login_url, redirect_field_name)
return cast(ViewFuncT, _wrapped_view) # https://github.com/python/mypy/issues/1927
return decorator
def logged_in_and_active(request: HttpRequest) -> bool:
if not request.user.is_authenticated:
return False
if not request.user.is_active:
return False
if request.user.realm.deactivated:
return False
return user_matches_subdomain(get_subdomain(request), request.user)
def do_two_factor_login(request: HttpRequest, user_profile: UserProfile) -> None:
device = default_device(user_profile)
if device:
django_otp.login(request, device)
def do_login(request: HttpRequest, user_profile: UserProfile) -> None:
"""Creates a session, logging in the user, using the Django method,
and also adds helpful data needed by our server logs.
"""
django_login(request, user_profile)
RequestNotes.get_notes(request).requestor_for_logs = user_profile.format_requestor_for_logs()
process_client(request, user_profile, is_browser_view=True)
if settings.TWO_FACTOR_AUTHENTICATION_ENABLED:
# Log in with two factor authentication as well.
do_two_factor_login(request, user_profile)
def log_view_func(view_func: ViewFuncT) -> ViewFuncT:
@wraps(view_func)
def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
RequestNotes.get_notes(request).query = view_func.__name__
return view_func(request, *args, **kwargs)
return cast(ViewFuncT, _wrapped_view_func) # https://github.com/python/mypy/issues/1927
def add_logging_data(view_func: ViewFuncT) -> ViewFuncT:
@wraps(view_func)
def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
process_client(request, request.user, is_browser_view=True, query=view_func.__name__)
return rate_limit()(view_func)(request, *args, **kwargs)
return cast(ViewFuncT, _wrapped_view_func) # https://github.com/python/mypy/issues/1927
def human_users_only(view_func: ViewFuncT) -> ViewFuncT:
@wraps(view_func)
def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
assert request.user.is_authenticated
if request.user.is_bot:
raise JsonableError(_("This endpoint does not accept bot requests."))
return view_func(request, *args, **kwargs)
return cast(ViewFuncT, _wrapped_view_func) # https://github.com/python/mypy/issues/1927
@overload
def zulip_login_required(
function: ViewFuncT,
redirect_field_name: str = REDIRECT_FIELD_NAME,
login_url: str = settings.HOME_NOT_LOGGED_IN,
) -> ViewFuncT:
...
@overload
def zulip_login_required(
function: None,
redirect_field_name: str = REDIRECT_FIELD_NAME,
login_url: str = settings.HOME_NOT_LOGGED_IN,
) -> Callable[[ViewFuncT], ViewFuncT]:
...
# Based on Django 1.8's @login_required
def zulip_login_required(
function: Optional[ViewFuncT] = None,
redirect_field_name: str = REDIRECT_FIELD_NAME,
login_url: str = settings.HOME_NOT_LOGGED_IN,
) -> Union[Callable[[ViewFuncT], ViewFuncT], ViewFuncT]:
actual_decorator = lambda function: user_passes_test(
logged_in_and_active,
login_url=login_url,
redirect_field_name=redirect_field_name,
)(
zulip_otp_required(
redirect_field_name=redirect_field_name,
login_url=login_url,
)(add_logging_data(function))
)
if function:
return actual_decorator(function)
return actual_decorator # nocoverage # We don't use this without a function
def web_public_view(
view_func: ViewFuncT,
redirect_field_name: str = REDIRECT_FIELD_NAME,
login_url: str = settings.HOME_NOT_LOGGED_IN,
) -> Union[Callable[[ViewFuncT], ViewFuncT], ViewFuncT]:
"""
This wrapper adds client info for unauthenticated users but
forces authenticated users to go through 2fa.
"""
actual_decorator = lambda view_func: zulip_otp_required(
redirect_field_name=redirect_field_name, login_url=login_url
)(add_logging_data(view_func))
return actual_decorator(view_func)
def require_server_admin(view_func: ViewFuncT) -> ViewFuncT:
@zulip_login_required
@wraps(view_func)
def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
if not request.user.is_staff:
return HttpResponseRedirect(settings.HOME_NOT_LOGGED_IN)
return add_logging_data(view_func)(request, *args, **kwargs)
return cast(ViewFuncT, _wrapped_view_func) # https://github.com/python/mypy/issues/1927
def require_server_admin_api(view_func: ViewFuncT) -> ViewFuncT:
@zulip_login_required
@wraps(view_func)
def _wrapped_view_func(
request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
) -> HttpResponse:
if not user_profile.is_staff:
raise JsonableError(_("Must be an server administrator"))
return view_func(request, user_profile, *args, **kwargs)
return cast(ViewFuncT, _wrapped_view_func) # https://github.com/python/mypy/issues/1927
def require_non_guest_user(view_func: ViewFuncT) -> ViewFuncT:
@wraps(view_func)
def _wrapped_view_func(
request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
) -> HttpResponse:
if user_profile.is_guest:
raise JsonableError(_("Not allowed for guest users"))
return view_func(request, user_profile, *args, **kwargs)
return cast(ViewFuncT, _wrapped_view_func) # https://github.com/python/mypy/issues/1927
def require_member_or_admin(view_func: ViewFuncT) -> ViewFuncT:
@wraps(view_func)
def _wrapped_view_func(
request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
) -> HttpResponse:
if user_profile.is_guest:
raise JsonableError(_("Not allowed for guest users"))
if user_profile.is_bot:
raise JsonableError(_("This endpoint does not accept bot requests."))
return view_func(request, user_profile, *args, **kwargs)
return cast(ViewFuncT, _wrapped_view_func) # https://github.com/python/mypy/issues/1927
def require_user_group_edit_permission(view_func: ViewFuncT) -> ViewFuncT:
@require_member_or_admin
@wraps(view_func)
def _wrapped_view_func(
request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
) -> HttpResponse:
if not user_profile.can_edit_user_groups():
raise JsonableError(_("Insufficient permission"))
return view_func(request, user_profile, *args, **kwargs)
return cast(ViewFuncT, _wrapped_view_func) # https://github.com/python/mypy/issues/1927
# This API endpoint is used only for the mobile apps. It is part of a
# workaround for the fact that React Native doesn't support setting
# HTTP basic authentication headers.
def authenticated_uploads_api_view(
skip_rate_limiting: bool = False,
) -> Callable[[Callable[..., HttpResponse]], Callable[..., HttpResponse]]:
def _wrapped_view_func(view_func: Callable[..., HttpResponse]) -> Callable[..., HttpResponse]:
@csrf_exempt
@has_request_variables
@wraps(view_func)
def _wrapped_func_arguments(
request: HttpRequest, api_key: str = REQ(), *args: object, **kwargs: object
) -> HttpResponse:
user_profile = validate_api_key(request, None, api_key, False)
if not skip_rate_limiting:
limited_func = rate_limit()(view_func)
else:
limited_func = view_func
return limited_func(request, user_profile, *args, **kwargs)
return _wrapped_func_arguments
return _wrapped_view_func
# A more REST-y authentication decorator, using, in particular, HTTP basic
# authentication.
#
# If webhook_client_name is specified, the request is a webhook view
# with that string as the basis for the client string.
def authenticated_rest_api_view(
*,
webhook_client_name: Optional[str] = None,
allow_webhook_access: bool = False,
skip_rate_limiting: bool = False,
) -> Callable[[Callable[..., HttpResponse]], Callable[..., HttpResponse]]:
if webhook_client_name is not None:
allow_webhook_access = True
def _wrapped_view_func(view_func: Callable[..., HttpResponse]) -> Callable[..., HttpResponse]:
@csrf_exempt
@wraps(view_func)
def _wrapped_func_arguments(
request: HttpRequest, *args: object, **kwargs: object
) -> HttpResponse:
# First try block attempts to get the credentials we need to do authentication
try:
# Grab the base64-encoded authentication string, decode it, and split it into
# the email and API key
auth_type, credentials = request.META["HTTP_AUTHORIZATION"].split()
# case insensitive per RFC 1945
if auth_type.lower() != "basic":
raise JsonableError(_("This endpoint requires HTTP basic authentication."))
role, api_key = base64.b64decode(credentials).decode().split(":")
except ValueError:
return json_unauthorized(_("Invalid authorization header for basic auth"))
except KeyError:
return json_unauthorized(_("Missing authorization header for basic auth"))
# Now we try to do authentication or die
try:
# profile is a Union[UserProfile, RemoteZulipServer]
profile = validate_api_key(
request,
role,
api_key,
allow_webhook_access=allow_webhook_access,
client_name=full_webhook_client_name(webhook_client_name),
)
except JsonableError as e:
return json_unauthorized(e.msg)
try:
if not skip_rate_limiting:
# Apply rate limiting
target_view_func = rate_limit()(view_func)
else:
target_view_func = view_func
return target_view_func(request, profile, *args, **kwargs)
except Exception as err:
if not webhook_client_name:
raise err
if isinstance(err, JsonableError) and not isinstance(
err, WebhookError
): # nocoverage
raise err
if isinstance(err, WebhookError):
err.webhook_name = webhook_client_name
log_exception_to_webhook_logger(err)
raise err
return _wrapped_func_arguments
return _wrapped_view_func
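# Illustrative wiring of a REST endpoint through this decorator (the view and
# webhook client name below are hypothetical, not taken from this codebase):
#
#     @authenticated_rest_api_view(webhook_client_name="Example")
#     def api_example_webhook(
#         request: HttpRequest, user_profile: UserProfile
#     ) -> HttpResponse:
#         ...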
def process_as_post(view_func: ViewFuncT) -> ViewFuncT:
@wraps(view_func)
def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
# Adapted from django/http/__init__.py.
# So by default Django doesn't populate request.POST for anything besides
# POST requests. We want this dict populated for PATCH/PUT, so we have to
# do it ourselves.
#
# This will not be required in the future; a bug will be filed against
# Django upstream.
if not request.POST:
# Only take action if POST is empty.
if request.META.get("CONTENT_TYPE", "").startswith("multipart"):
# Note that request._files is just the private attribute that backs the
# FILES property, so we are essentially setting request.FILES here. (In
# Django 1.5 FILES was still a read-only property.)
request.POST, request._files = MultiPartParser(
request.META,
BytesIO(request.body),
request.upload_handlers,
request.encoding,
).parse()
else:
request.POST = QueryDict(request.body, encoding=request.encoding)
return view_func(request, *args, **kwargs)
return cast(ViewFuncT, _wrapped_view_func) # https://github.com/python/mypy/issues/1927
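# Illustrative use: a REST dispatcher can wrap a handler so that PATCH/PUT
# bodies are exposed via request.POST just as for ordinary POST requests:
#
#     handler = process_as_post(handler)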
def authenticate_log_and_execute_json(
request: HttpRequest,
view_func: ViewFuncT,
*args: object,
skip_rate_limiting: bool = False,
allow_unauthenticated: bool = False,
**kwargs: object,
) -> HttpResponse:
if not skip_rate_limiting:
limited_view_func = rate_limit()(view_func)
else:
limited_view_func = view_func
if not request.user.is_authenticated:
if not allow_unauthenticated:
return json_unauthorized()
process_client(
request,
request.user,
is_browser_view=True,
skip_update_user_activity=True,
query=view_func.__name__,
)
return limited_view_func(request, request.user, *args, **kwargs)
user_profile = request.user
validate_account_and_subdomain(request, user_profile)
if user_profile.is_incoming_webhook:
raise JsonableError(_("Webhook bots can only access webhooks"))
process_client(request, user_profile, is_browser_view=True, query=view_func.__name__)
return limited_view_func(request, user_profile, *args, **kwargs)
# Checks if the user is logged in. If not, return an error (the
# @login_required behavior of redirecting to a login page doesn't make
# sense for json views)
def authenticated_json_view(
view_func: Callable[..., HttpResponse],
skip_rate_limiting: bool = False,
allow_unauthenticated: bool = False,
) -> Callable[..., HttpResponse]:
@wraps(view_func)
def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
return authenticate_log_and_execute_json(
request,
view_func,
*args,
skip_rate_limiting=skip_rate_limiting,
allow_unauthenticated=allow_unauthenticated,
**kwargs,
)
return _wrapped_view_func
def is_local_addr(addr: str) -> bool:
return addr in ("127.0.0.1", "::1")
# These views are used by the main Django server to notify the Tornado server
# of events. We protect them from the outside world by checking a shared
# secret, and also the originating IP (for now).
def authenticate_notify(request: HttpRequest) -> bool:
return (
is_local_addr(request.META["REMOTE_ADDR"])
and request.POST.get("secret") == settings.SHARED_SECRET
)
def client_is_exempt_from_rate_limiting(request: HttpRequest) -> bool:
# Don't rate limit requests from Django that come from our own servers,
# and don't rate-limit dev instances
client = RequestNotes.get_notes(request).client
return (client is not None and client.name.lower() == "internal") and (
is_local_addr(request.META["REMOTE_ADDR"]) or settings.DEBUG_RATE_LIMITING
)
def internal_notify_view(
is_tornado_view: bool,
) -> Callable[[ViewFuncT], Callable[..., HttpResponse]]:
# The typing here could be improved by using the extended Callable types:
# https://mypy.readthedocs.io/en/stable/additional_features.html#extended-callable-types
"""Used for situations where something running on the Zulip server
needs to make a request to the (other) Django/Tornado processes running on
the server."""
def _wrapped_view_func(view_func: ViewFuncT) -> Callable[..., HttpResponse]:
@csrf_exempt
@require_post
@wraps(view_func)
def _wrapped_func_arguments(
request: HttpRequest, *args: object, **kwargs: object
) -> HttpResponse:
if not authenticate_notify(request):
raise AccessDeniedError()
request_notes = RequestNotes.get_notes(request)
is_tornado_request = request_notes.tornado_handler is not None
# These next 2 are not security checks; they are internal
# assertions to help us find bugs.
if is_tornado_view and not is_tornado_request:
raise RuntimeError("Tornado notify view called with no Tornado handler")
if not is_tornado_view and is_tornado_request:
raise RuntimeError("Django notify view called with Tornado handler")
request_notes.requestor_for_logs = "internal"
return view_func(request, *args, **kwargs)
return _wrapped_func_arguments
return _wrapped_view_func
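# Illustrative usage (the endpoint name is hypothetical): Tornado-facing
# notify views are declared with is_tornado_view=True so the handler
# assertions above hold:
#
#     @internal_notify_view(True)
#     def notify_tornado(request: HttpRequest) -> HttpResponse:
#         ...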
def to_utc_datetime(var_name: str, timestamp: str) -> datetime.datetime:
return timestamp_to_datetime(float(timestamp))
def statsd_increment(counter: str, val: int = 1) -> Callable[[FuncT], FuncT]:
"""Increments a statsd counter on completion of the
decorated function.
Pass the name of the counter to this decorator-returning function."""
def wrapper(func: FuncT) -> FuncT:
@wraps(func)
def wrapped_func(*args: object, **kwargs: object) -> object:
ret = func(*args, **kwargs)
statsd.incr(counter, val)
return ret
return cast(FuncT, wrapped_func) # https://github.com/python/mypy/issues/1927
return wrapper
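# Example usage of statsd_increment (the counter name is illustrative):
#
#     @statsd_increment("webhooks.processed")
#     def process_one_webhook(payload: object) -> None:
#         ...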
def rate_limit_user(request: HttpRequest, user: UserProfile, domain: str) -> None:
"""Returns whether or not a user was rate limited. Will raise a RateLimited exception
if the user has been rate limited, otherwise returns and modifies request to contain
the rate limit information"""
RateLimitedUser(user, domain=domain).rate_limit_request(request)
@cache_with_key(lambda: "tor_ip_addresses:", timeout=60 * 60)
@circuit(failure_threshold=2, recovery_timeout=60 * 10)
def get_tor_ips() -> Set[str]:
if not settings.RATE_LIMIT_TOR_TOGETHER:
return set()
# Cron job in /etc/cron.d/fetch-tor-exit-nodes fetches this
# hourly; we cache it in memcached to prevent going to disk on
# every unauth'd request. In case of failures to read, we
# circuit-break so 2 failures cause a 10-minute backoff.
with open(settings.TOR_EXIT_NODE_FILE_PATH, "rb") as f:
exit_node_list = orjson.loads(f.read())
# This should always be non-empty; if it's empty, assume something
# went wrong with writing and treat it as a non-existent file.
# Circuit-breaking will ensure that we back off on re-reading the
# file.
if len(exit_node_list) == 0:
raise OSError("File is empty")
return set(exit_node_list)
def rate_limit_ip(request: HttpRequest, ip_addr: str, domain: str) -> None:
RateLimitedIPAddr(ip_addr, domain=domain).rate_limit_request(request)
def rate_limit_request_by_ip(request: HttpRequest, domain: str) -> None:
# REMOTE_ADDR is set by SetRemoteAddrFromRealIpHeader in conjunction
# with the nginx configuration to guarantee this to be *the* correct
# IP address to use - without worrying we'll grab the IP of a proxy.
ip_addr = request.META["REMOTE_ADDR"]
assert ip_addr
try:
# We lump all TOR exit nodes into one bucket; this prevents
# abuse from TOR, while still allowing some access to these
# endpoints for legitimate users. Checking for local
# addresses is a shortcut somewhat for ease of testing without
# mocking the TOR endpoint in every test.
if is_local_addr(ip_addr):
pass
elif ip_addr in get_tor_ips():
ip_addr = "tor-exit-node"
except (OSError, CircuitBreakerError) as err:
# In the event that we can't get an updated list of TOR exit
# nodes, assume the IP is _not_ one, and leave it unchanged.
# We log a warning so that this endpoint being taken out of
# service doesn't silently remove this functionality.
rate_limiter_logger.warning("Failed to fetch TOR exit node list: %s", err)
pass
rate_limit_ip(request, ip_addr, domain=domain)
def rate_limit_remote_server(
request: HttpRequest, remote_server: "RemoteZulipServer", domain: str
) -> None:
try:
RateLimitedRemoteZulipServer(remote_server, domain=domain).rate_limit_request(request)
except RateLimited as e:
rate_limiter_logger.warning(
"Remote server %s exceeded rate limits on domain %s", remote_server, domain
)
raise e
def rate_limit() -> Callable[[ViewFuncT], ViewFuncT]:
"""Rate-limits a view. Returns a decorator"""
def wrapper(func: ViewFuncT) -> ViewFuncT:
@wraps(func)
def wrapped_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
# It is really tempting to not even wrap our original function
# when settings.RATE_LIMITING is False, but it would make
# for awkward unit testing in some situations.
if not settings.RATE_LIMITING:
return func(request, *args, **kwargs)
if client_is_exempt_from_rate_limiting(request):
return func(request, *args, **kwargs)
user = request.user
if isinstance(user, AnonymousUser):
rate_limit_request_by_ip(request, domain="api_by_ip")
return func(request, *args, **kwargs)
elif settings.ZILENCER_ENABLED and isinstance(user, RemoteZulipServer):
rate_limit_remote_server(request, user, domain="api_by_remote_server")
else:
assert isinstance(user, UserProfile)
rate_limit_user(request, user, domain="api_by_user")
return func(request, *args, **kwargs)
return cast(ViewFuncT, wrapped_func) # https://github.com/python/mypy/issues/1927
return wrapper
def return_success_on_head_request(view_func: ViewFuncT) -> ViewFuncT:
@wraps(view_func)
def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
if request.method == "HEAD":
return json_success(request)
return view_func(request, *args, **kwargs)
return cast(ViewFuncT, _wrapped_view_func) # https://github.com/python/mypy/issues/1927
def zulip_otp_required(
redirect_field_name: str = "next",
login_url: str = settings.HOME_NOT_LOGGED_IN,
) -> Callable[[ViewFuncT], ViewFuncT]:
"""
The reason we need to create this function is that the stock
otp_required decorator doesn't play well with tests. We cannot
enable/disable if_configured parameter during tests since the decorator
retains its value due to closure.
Similar to :func:`~django.contrib.auth.decorators.login_required`, but
requires the user to be :term:`verified`. By default, this redirects users
to :setting:`OTP_LOGIN_URL`.
"""
def test(user: UserProfile) -> bool:
"""
:if_configured: If ``True``, an authenticated user with no confirmed
OTP devices will be allowed. Also, non-authenticated users will be
allowed as spectator users. Default is ``False``. If ``False``,
2FA will not do any authentication.
"""
if_configured = settings.TWO_FACTOR_AUTHENTICATION_ENABLED
if not if_configured:
return True
# User has completed 2FA verification
if user.is_verified():
return True
# This request is unauthenticated (logged-out) access; 2FA is
# not required or possible.
#
# TODO: Add a test for 2FA-enabled with web-public views.
if not user.is_authenticated: # nocoverage
return True
# If the user doesn't have 2FA set up, we can't enforce 2FA.
if not user_has_device(user):
return True
# User has configured 2FA and is not verified, so the user
# fails the test (and we should redirect to the 2FA view).
return False
decorator = django_user_passes_test(
test, login_url=login_url, redirect_field_name=redirect_field_name
)
return decorator
def add_google_analytics_context(context: Dict[str, object]) -> None:
if settings.GOOGLE_ANALYTICS_ID is not None: # nocoverage
page_params = context.setdefault("page_params", {})
assert isinstance(page_params, dict)
page_params["google_analytics_id"] = settings.GOOGLE_ANALYTICS_ID
def add_google_analytics(view_func: ViewFuncT) -> ViewFuncT:
@wraps(view_func)
def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
response = view_func(request, *args, **kwargs)
if isinstance(response, SimpleTemplateResponse):
if response.context_data is None:
response.context_data = {}
add_google_analytics_context(response.context_data)
elif response.status_code == 200: # nocoverage
raise TypeError("add_google_analytics requires a TemplateResponse")
return response
return cast(ViewFuncT, _wrapped_view_func) # https://github.com/python/mypy/issues/1927
| {
"content_hash": "c392a220de9ea5d117bac95a11dd5e75",
"timestamp": "",
"source": "github",
"line_count": 1072,
"max_line_length": 112,
"avg_line_length": 39.196828358208954,
"alnum_prop": 0.6607249101596897,
"repo_name": "kou/zulip",
"id": "eb13f08153f97f1fd96fdffea54377b391c1d0b4",
"size": "42019",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "zerver/decorator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "433376"
},
{
"name": "Dockerfile",
"bytes": "2941"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "635452"
},
{
"name": "Handlebars",
"bytes": "235334"
},
{
"name": "JavaScript",
"bytes": "3361648"
},
{
"name": "Perl",
"bytes": "8594"
},
{
"name": "Puppet",
"bytes": "79932"
},
{
"name": "Python",
"bytes": "8142846"
},
{
"name": "Ruby",
"bytes": "8480"
},
{
"name": "Shell",
"bytes": "134587"
},
{
"name": "TypeScript",
"bytes": "20233"
}
],
"symlink_target": ""
} |
SECRET_KEY = 'secret'
DEBUG = True
DATABASE_URL = 'postgresql://dila:dilapasswd@localhost/dila'
| {
"content_hash": "a928d0ffbab17a1f0a1e7f93fb7c78e0",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 60,
"avg_line_length": 32,
"alnum_prop": 0.75,
"repo_name": "socialwifi/dila",
"id": "fa0a5aba5cc336e7b7a2abe2dc8862467e2d5231",
"size": "96",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "11934"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "116973"
},
{
"name": "Shell",
"bytes": "1491"
}
],
"symlink_target": ""
} |
import os
import sys
from optparse import OptionParser
sort_key = ['local', 'cage', 'hail', 'east', 'west', 'eu', 'asia']
def process_command_line(argv):
global operation_mode
parser = OptionParser(usage="usage: %prog [option]", version="MOPED Desktop Client")
parser.add_option(
'-i', '--input', action='store', type='string', dest='input_file',
help='Set Input file')
parser.add_option(
'-d', '--dir', action='store', type='string', dest='input_dir',
help='Set Input directory')
parser.add_option(
'-o', '--output', action='store', type='int', dest='output_file',
help='Set output file')
settings, args = parser.parse_args(argv)
if not len(args) == 0:
parser.error('program takes no command-line arguments; "%s" ignored.' % (args,))
return settings, args
def convert_to_CDF(input_file, output_file):
input_lines = open(input_file, "r").read().split("\n")
output_file = open(output_file, "w")
rtt_list = []
jitter_sum = 0.0
start_time = 0.0
end_time = 0.0
for index, oneline in enumerate(input_lines):
if len(oneline.split("\t")) != 6 and len(oneline.split("\t")) != 5:
#sys.stderr.write("Error at input line at %d, %s\n" % (index, oneline))
continue
try:
if float(oneline.split("\t")[2]) == 0:
sys.stderr.write("Error at input line at %d, %s\n" % (index, oneline))
continue
except ValueError:
continue
try:
rtt_list.append(float(oneline.split("\t")[3]))
if not index == 0:
# protect against the error case where the initial jitter value equals the latency
jitter_sum += (float(oneline.split("\t")[4]))
if start_time == 0.0:
start_time = float(oneline.split("\t")[1])
end_time = float(oneline.split("\t")[2])
except ValueError:
sys.stderr.write("Error at input line at %d, %s\n" % (index, oneline))
continue
rtt_sorted = sorted(rtt_list)
total_rtt_number = len(rtt_sorted)
cdf = []
summary = "%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f" % (rtt_sorted[0],
rtt_sorted[int(total_rtt_number*0.01)], \
rtt_sorted[int(total_rtt_number*0.25)], \
rtt_sorted[int(total_rtt_number*0.5)], \
rtt_sorted[int(total_rtt_number*0.75)], \
rtt_sorted[int(total_rtt_number*0.99)], \
rtt_sorted[-1], \
jitter_sum/total_rtt_number, \
(end_time-start_time))
for index, value in enumerate(rtt_sorted):
data = (value, 1.0 * (index+1)/total_rtt_number)
cdf_string = "%f\t%f\n" % (data[0], data[1])
output_file.write(cdf_string)
cdf.append(data)
return summary, cdf
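# Worked example (illustrative): for rtt_list = [10.0, 20.0, 30.0, 40.0] the
# CDF pairs written are (10.0, 0.25), (20.0, 0.50), (30.0, 0.75), (40.0, 1.00),
# i.e. each RTT value versus the fraction of samples less than or equal to it.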
def main(argv=None):
global LOCAL_IPADDRESS
global sort_key
settings, args = process_command_line(sys.argv[1:])
if settings.input_file and os.path.exists(settings.input_file):
convert_to_CDF(settings.input_file, settings.input_file + ".cdf")
elif settings.input_dir and len(os.listdir(settings.input_dir)) > 0 :
summary_list = []
cdf_all_list = []
input_file_list = []
for each_file in os.listdir(settings.input_dir):
if os.path.isdir(os.path.join(settings.input_dir, each_file)):
continue
if each_file.find(".") != -1:
continue
input_file_list.append(each_file)
# sort by keyword
file_list = []
counter = 0
for key_word in sort_key:
for each_file in input_file_list:
if each_file.find(key_word) != -1:
counter += 1
file_list.append(each_file)
print "File : %s" % each_file
for each_file in file_list:
input_file = os.path.join(settings.input_dir, each_file)
summary_str, cdf_list = convert_to_CDF(input_file, input_file + ".cdf")
summary_list.append(summary_str)
cdf_all_list.append(cdf_list)
# print out all data
print "="*50
print "\tmin\t1%\t25%\t50%\t75%\t99%\tmax\tjitter\trun_time"
for index, summary in enumerate(summary_list):
print "%s\t%s" % (file_list[index], summary)
print "\n"*2
for each_file in file_list:
sys.stdout.write("%s\t\t" % os.path.splitext(os.path.basename(each_file))[0])
sys.stdout.write("\n")
# Get longest CDF
max_length = 0
for cdf_ret in cdf_all_list:
if len(cdf_ret) > max_length:
max_length = len(cdf_ret)
for index in xrange(max_length):
for cdf_list in cdf_all_list:
if len(cdf_list) > index:
sys.stdout.write("%f\t%f\t" % (cdf_list[index][0], cdf_list[index][1]))
else:
sys.stdout.write("\t\t")
sys.stdout.write("\n")
return 0
if __name__ == "__main__":
status = main()
sys.exit(status)
| {
"content_hash": "80a9dbe78861d5d22a4b16692c38e401",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 91,
"avg_line_length": 36.6241134751773,
"alnum_prop": 0.539697908597986,
"repo_name": "cmusatyalab/elijah-provisioning",
"id": "eaaaa69e6dd30551a60cfa91f5421d30e0588f10",
"size": "5898",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/app-client/scripts/data_to_cdf.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "84100"
},
{
"name": "CSS",
"bytes": "920"
},
{
"name": "HTML",
"bytes": "3775"
},
{
"name": "Java",
"bytes": "318511"
},
{
"name": "JavaScript",
"bytes": "842050"
},
{
"name": "M4",
"bytes": "1339"
},
{
"name": "Makefile",
"bytes": "651"
},
{
"name": "Python",
"bytes": "859033"
},
{
"name": "Shell",
"bytes": "2441"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import logging
from oauthlib.common import add_params_to_uri
from oauthlib.common import urldecode as _urldecode
from oauthlib.oauth1 import (
SIGNATURE_HMAC, SIGNATURE_RSA, SIGNATURE_TYPE_AUTH_HEADER
)
import requests
from . import OAuth1
import sys
if sys.version > "3":
unicode = str
log = logging.getLogger(__name__)
def urldecode(body):
"""Parse query or json to python dictionary"""
try:
return _urldecode(body)
except:
import json
return json.loads(body)
class TokenRequestDenied(ValueError):
def __init__(self, message, status_code):
super(TokenRequestDenied, self).__init__(message)
self.status_code = status_code
class TokenMissing(ValueError):
def __init__(self, message, response):
super(TokenMissing, self).__init__(message)
self.response = response
class VerifierMissing(ValueError):
pass
class OAuth1Session(requests.Session):
"""Request signing and convenience methods for the oauth dance.
What is the difference between OAuth1Session and OAuth1?
OAuth1Session actually uses OAuth1 internally and its purpose is to assist
in the OAuth workflow through convenience methods to prepare authorization
URLs and parse the various token and redirection responses. It also provides
rudimentary validation of responses.
An example of the OAuth workflow using a basic CLI app and Twitter.
>>> # Credentials obtained during the registration.
>>> client_key = 'client key'
>>> client_secret = 'secret'
>>> callback_uri = 'https://127.0.0.1/callback'
>>>
>>> # Endpoints found in the OAuth provider API documentation
>>> request_token_url = 'https://api.twitter.com/oauth/request_token'
>>> authorization_url = 'https://api.twitter.com/oauth/authorize'
>>> access_token_url = 'https://api.twitter.com/oauth/access_token'
>>>
>>> oauth_session = OAuth1Session(client_key,client_secret=client_secret, callback_uri=callback_uri)
>>>
>>> # First step, fetch the request token.
>>> oauth_session.fetch_request_token(request_token_url)
{
'oauth_token': 'kjerht2309u',
'oauth_token_secret': 'lsdajfh923874',
}
>>>
>>> # Second step. Follow this link and authorize
>>> oauth_session.authorization_url(authorization_url)
'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&oauth_callback=https%3A%2F%2F127.0.0.1%2Fcallback'
>>>
>>> # Third step. Fetch the access token
>>> redirect_response = raw_input('Paste the full redirect URL here.')
>>> oauth_session.parse_authorization_response(redirect_response)
{
'oauth_token: 'kjerht2309u',
'oauth_token_secret: 'lsdajfh923874',
'oauth_verifier: 'w34o8967345',
}
>>> oauth_session.fetch_access_token(access_token_url)
{
'oauth_token': 'sdf0o9823sjdfsdf',
'oauth_token_secret': '2kjshdfp92i34asdasd',
}
>>> # Done. You can now make OAuth requests.
>>> status_url = 'http://api.twitter.com/1/statuses/update.json'
>>> new_status = {'status': 'hello world!'}
>>> oauth_session.post(status_url, data=new_status)
<Response [200]>
"""
def __init__(self, client_key,
client_secret=None,
resource_owner_key=None,
resource_owner_secret=None,
callback_uri=None,
signature_method=SIGNATURE_HMAC,
signature_type=SIGNATURE_TYPE_AUTH_HEADER,
rsa_key=None,
verifier=None,
client_class=None,
force_include_body=False,
**kwargs):
"""Construct the OAuth 1 session.
:param client_key: A client specific identifier.
:param client_secret: A client specific secret used to create HMAC and
plaintext signatures.
:param resource_owner_key: A resource owner key, also referred to as
request token or access token depending on
when in the workflow it is used.
:param resource_owner_secret: A resource owner secret obtained with
either a request or access token. Often
referred to as token secret.
:param callback_uri: The URL the user is redirect back to after
authorization.
:param signature_method: Signature methods determine how the OAuth
signature is created. The three options are
oauthlib.oauth1.SIGNATURE_HMAC (default),
oauthlib.oauth1.SIGNATURE_RSA and
oauthlib.oauth1.SIGNATURE_PLAIN.
:param signature_type: Signature type decides where the OAuth
parameters are added. Either in the
Authorization header (default) or to the URL
query parameters or the request body. Defined as
oauthlib.oauth1.SIGNATURE_TYPE_AUTH_HEADER,
oauthlib.oauth1.SIGNATURE_TYPE_QUERY and
oauthlib.oauth1.SIGNATURE_TYPE_BODY
respectively.
:param rsa_key: The private RSA key as a string. Can only be used with
signature_method=oauthlib.oauth1.SIGNATURE_RSA.
:param verifier: A verifier string to prove authorization was granted.
:param client_class: A subclass of `oauthlib.oauth1.Client` to use with
`requests_oauthlib.OAuth1` instead of the default
:param force_include_body: Always include the request body in the
signature creation.
:param **kwargs: Additional keyword arguments passed to `OAuth1`
"""
super(OAuth1Session, self).__init__()
self._client = OAuth1(client_key,
client_secret=client_secret,
resource_owner_key=resource_owner_key,
resource_owner_secret=resource_owner_secret,
callback_uri=callback_uri,
signature_method=signature_method,
signature_type=signature_type,
rsa_key=rsa_key,
verifier=verifier,
client_class=client_class,
force_include_body=force_include_body,
**kwargs)
self.auth = self._client
@property
def authorized(self):
"""Boolean that indicates whether this session has an OAuth token
or not. If `self.authorized` is True, you can reasonably expect
OAuth-protected requests to the resource to succeed. If
`self.authorized` is False, you need the user to go through the OAuth
authentication dance before OAuth-protected requests to the resource
will succeed.
"""
if self._client.client.signature_method == SIGNATURE_RSA:
# RSA only uses resource_owner_key
return bool(self._client.client.resource_owner_key)
else:
# other methods of authentication use all three pieces
return (
bool(self._client.client.client_secret) and
bool(self._client.client.resource_owner_key) and
bool(self._client.client.resource_owner_secret)
)
def authorization_url(self, url, request_token=None, **kwargs):
"""Create an authorization URL by appending request_token and optional
kwargs to url.
This is the second step in the OAuth 1 workflow. The user should be
redirected to this authorization URL, grant access to you, and then
be redirected back to you. The redirection back can either be specified
during client registration or by supplying a callback URI per request.
:param url: The authorization endpoint URL.
:param request_token: The previously obtained request token.
:param kwargs: Optional parameters to append to the URL.
:returns: The authorization URL with new parameters embedded.
An example using a registered default callback URI.
>>> request_token_url = 'https://api.twitter.com/oauth/request_token'
>>> authorization_url = 'https://api.twitter.com/oauth/authorize'
>>> oauth_session = OAuth1Session('client-key', client_secret='secret')
>>> oauth_session.fetch_request_token(request_token_url)
{
'oauth_token': 'sdf0o9823sjdfsdf',
'oauth_token_secret': '2kjshdfp92i34asdasd',
}
>>> oauth_session.authorization_url(authorization_url)
'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf'
>>> oauth_session.authorization_url(authorization_url, foo='bar')
'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&foo=bar'
An example using an explicit callback URI.
>>> request_token_url = 'https://api.twitter.com/oauth/request_token'
>>> authorization_url = 'https://api.twitter.com/oauth/authorize'
>>> oauth_session = OAuth1Session('client-key', client_secret='secret', callback_uri='https://127.0.0.1/callback')
>>> oauth_session.fetch_request_token(request_token_url)
{
'oauth_token': 'sdf0o9823sjdfsdf',
'oauth_token_secret': '2kjshdfp92i34asdasd',
}
>>> oauth_session.authorization_url(authorization_url)
'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&oauth_callback=https%3A%2F%2F127.0.0.1%2Fcallback'
"""
kwargs['oauth_token'] = request_token or self._client.client.resource_owner_key
log.debug('Adding parameters %s to url %s', kwargs, url)
return add_params_to_uri(url, kwargs.items())
def fetch_request_token(self, url, realm=None):
"""Fetch a request token.
This is the first step in the OAuth 1 workflow. A request token is
obtained by making a signed post request to url. The token is then
parsed from the application/x-www-form-urlencoded response and ready
to be used to construct an authorization url.
:param url: The request token endpoint URL.
:param realm: A list of realms to request access to.
:returns: The response in dict format.
Note that a previously set callback_uri will be reset for your
convenience, or else signature creation will be incorrect on
consecutive requests.
>>> request_token_url = 'https://api.twitter.com/oauth/request_token'
>>> oauth_session = OAuth1Session('client-key', client_secret='secret')
>>> oauth_session.fetch_request_token(request_token_url)
{
'oauth_token': 'sdf0o9823sjdfsdf',
'oauth_token_secret': '2kjshdfp92i34asdasd',
}
"""
self._client.client.realm = ' '.join(realm) if realm else None
token = self._fetch_token(url)
log.debug('Resetting callback_uri and realm (not needed in next phase).')
self._client.client.callback_uri = None
self._client.client.realm = None
return token
def fetch_access_token(self, url, verifier=None):
"""Fetch an access token.
This is the final step in the OAuth 1 workflow. An access token is
obtained using all previously obtained credentials, including the
verifier from the authorization step.
Note that a previously set verifier will be reset for your
convenience, or else signature creation will be incorrect on
consecutive requests.
>>> access_token_url = 'https://api.twitter.com/oauth/access_token'
>>> redirect_response = 'https://127.0.0.1/callback?oauth_token=kjerht2309uf&oauth_token_secret=lsdajfh923874&oauth_verifier=w34o8967345'
>>> oauth_session = OAuth1Session('client-key', client_secret='secret')
>>> oauth_session.parse_authorization_response(redirect_response)
{
'oauth_token: 'kjerht2309u',
'oauth_token_secret: 'lsdajfh923874',
'oauth_verifier: 'w34o8967345',
}
>>> oauth_session.fetch_access_token(access_token_url)
{
'oauth_token': 'sdf0o9823sjdfsdf',
'oauth_token_secret': '2kjshdfp92i34asdasd',
}
"""
if verifier:
self._client.client.verifier = verifier
if not getattr(self._client.client, 'verifier', None):
raise VerifierMissing('No client verifier has been set.')
token = self._fetch_token(url)
log.debug('Resetting verifier attribute, should not be used anymore.')
self._client.client.verifier = None
return token
def parse_authorization_response(self, url):
"""Extract parameters from the post authorization redirect response URL.
:param url: The full URL that resulted from the user being redirected
back from the OAuth provider to you, the client.
:returns: A dict of parameters extracted from the URL.
>>> redirect_response = 'https://127.0.0.1/callback?oauth_token=kjerht2309uf&oauth_token_secret=lsdajfh923874&oauth_verifier=w34o8967345'
>>> oauth_session = OAuth1Session('client-key', client_secret='secret')
>>> oauth_session.parse_authorization_response(redirect_response)
{
            'oauth_token': 'kjerht2309u',
            'oauth_token_secret': 'lsdajfh923874',
            'oauth_verifier': 'w34o8967345',
}
"""
log.debug('Parsing token from query part of url %s', url)
token = dict(urldecode(urlparse(url).query))
log.debug('Updating internal client token attribute.')
self._populate_attributes(token)
return token
def _populate_attributes(self, token):
if 'oauth_token' in token:
self._client.client.resource_owner_key = token['oauth_token']
else:
raise TokenMissing(
'Response does not contain a token: {resp}'.format(resp=token),
token,
)
if 'oauth_token_secret' in token:
self._client.client.resource_owner_secret = (
token['oauth_token_secret'])
if 'oauth_verifier' in token:
self._client.client.verifier = token['oauth_verifier']
def _fetch_token(self, url):
log.debug('Fetching token from %s using client %s', url, self._client.client)
r = self.post(url)
if r.status_code >= 400:
error = "Token request failed with code %s, response was '%s'."
raise TokenRequestDenied(error % (r.status_code, r.text), r.status_code)
log.debug('Decoding token from response "%s"', r.text)
try:
token = dict(urldecode(r.text))
except ValueError as e:
error = ("Unable to decode token from token response. "
"This is commonly caused by an unsuccessful request where"
" a non urlencoded error message is returned. "
"The decoding error was %s""" % e)
raise ValueError(error)
log.debug('Obtained token %s', token)
log.debug('Updating internal client attributes from token data.')
self._populate_attributes(token)
return token
def rebuild_auth(self, prepared_request, response):
"""
When being redirected we should always strip Authorization
header, since nonce may not be reused as per OAuth spec.
"""
if 'Authorization' in prepared_request.headers:
# If we get redirected to a new host, we should strip out
# any authentication headers.
prepared_request.headers.pop('Authorization', True)
prepared_request.prepare_auth(self.auth)
return
| {
"content_hash": "db54bb360077fec5bee972f319aa0436",
"timestamp": "",
"source": "github",
"line_count": 371,
"max_line_length": 145,
"avg_line_length": 43.498652291105124,
"alnum_prop": 0.6240550254058743,
"repo_name": "dltn/tortellini",
"id": "3064e334f73dd79ad39011697be27a7ce44c5a91",
"size": "16138",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server-firmware/Processing/DataPusher/requests_oauthlib/oauth1_session.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "63602"
},
{
"name": "C++",
"bytes": "50699"
},
{
"name": "Processing",
"bytes": "7995"
},
{
"name": "Python",
"bytes": "1253172"
}
],
"symlink_target": ""
} |
'''
Created on 2012-12-25
@author: jock
'''
from xml.dom import minidom
import sys
import re
import os
import android_manifest
reload(sys)
sys.setdefaultencoding("utf-8")
class idtoname(object):
'''
classdocs
'''
mIdToNameDict = {}
def __init__(self, xmlPath, inDir):
'''
Constructor
'''
self.smaliFileList = self.getInFileList(inDir)
self.idToNameMap = idtoname.getMap(xmlPath)
@staticmethod
def getMap(xmlPath):
absPath = os.path.abspath(xmlPath)
if not idtoname.mIdToNameDict.has_key(absPath):
idtoname.mIdToNameDict[absPath] = idtoname.getIdToNameMap(absPath)
return idtoname.mIdToNameDict[absPath]
def getInFileList(self, inDir):
if os.path.isfile(inDir):
return [inDir]
filelist = []
smaliRe = re.compile(r'(?:.*\.smali)')
for root, dirs, files in os.walk(inDir):
for fn in files:
if bool(smaliRe.match(fn)) is True:
filelist.append("%s/%s" % (root, fn))
return filelist
@staticmethod
def getIdToNameMap(xmlPath):
publicXml = minidom.parse(xmlPath)
root = publicXml.documentElement
idList = {}
pkgName = android_manifest.getPackageNameFromPublicXml(xmlPath)
Log.d("package name: %s" %pkgName)
pkgName = pkgName + ':'
for item in root.childNodes:
if item.nodeType == minidom.Node.ELEMENT_NODE:
itemType = item.getAttribute("type")
itemName = item.getAttribute("name")
itemId = item.getAttribute("id").replace(r'0x0', r'0x')
idList[itemId] = "%s%s@%s" % (pkgName, itemType, itemName)
return idList
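    # An illustrative note on getArrayId below: smali stores a 32-bit resource
    # id inside an array payload as four little-endian byte literals, e.g.
    #     0x4t 0x1t 0x5t 0x7ft
    # which getArrayId reassembles (zero-padding single-digit bytes) into the
    # big-endian form 0x7f050104, so it can be looked up in the id-to-name map
    # built from public.xml.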
def getArrayId(self, arrayIdStr):
idList = arrayIdStr.split()
arrayId = "%s%s%s%s" % (idList[3][-3:-1], idList[2][-3:-1], idList[1][-3:-1], idList[0][-3:-1])
arrayId = "0x%s" % (arrayId.replace('x', '0'))
return arrayId.replace('0x0', '0x')
def getIdByHigh16(self, high16Str):
idx = high16Str.index('0x')
rId = '%s%s' % (high16Str[idx:], '0000')
return (rId,high16Str[0:idx])
def getIdByApktool2High16(self, high16Str):
idx = high16Str.index('0x')
rId = high16Str[idx:]
return (rId,high16Str[0:idx])
def idtoname(self):
normalIdRule = re.compile(r'0x(?:[1-9a-f]|7f)[0-1][0-9a-f]{5}$', re.M)
arrayIdRule = re.compile(r'(?:0x[0-9a-f]{1,2}t ){3}0x(?:[1-9a-f]|7f)t')
high16IdRule = re.compile(r'const/high16[ ]*v[0-9][0-9]*,[ ]*0x(?:[1-9a-f]|7f)[0-1][0-9a-f]$', re.M)
apktool2High16IdRule = re.compile(r'const/high16[ ]*v[0-9][0-9]*,[ ]*0x(?:[1-9a-f]|7f)[0-1][0-9a-f]0000$', re.M)
for smaliFile in self.smaliFileList:
#print "start modify: %s" % smaliFile
sf = file(smaliFile, 'r+')
fileStr = sf.read()
modify = False
for matchApktool2Hight16IdStr in list(set(apktool2High16IdRule.findall(fileStr))):
(rId, preStr) = self.getIdByApktool2High16(matchApktool2Hight16IdStr)
name = self.idToNameMap.get(rId, None)
if name is not None:
fileStr = fileStr.replace(matchApktool2Hight16IdStr, r'%s#%s#i' % (preStr, name))
modify = True
Log.d("change id from %s to name %s" % (matchApktool2Hight16IdStr, name))
for matchId in list(set(normalIdRule.findall(fileStr))):
name = self.idToNameMap.get(matchId, None)
if name is not None:
fileStr = fileStr.replace(matchId, r'#%s#t' % name)
modify = True
Log.d("change id from %s to name %s" % (matchId, name))
for matchArrIdStr in list(set(arrayIdRule.findall(fileStr))):
matchArrId = self.getArrayId(matchArrIdStr)
arrName = self.idToNameMap.get(matchArrId, None)
if arrName is not None:
fileStr = fileStr.replace(matchArrIdStr, r'#%s#a' % arrName)
modify = True
Log.d("change array id from %s to name %s" % (matchArrIdStr, arrName))
for matchHigh16IdStr in list(set(high16IdRule.findall(fileStr))):
(rId, preStr) = self.getIdByHigh16(matchHigh16IdStr)
name = self.idToNameMap.get(rId, None)
if name is not None:
fileStr = fileStr.replace(matchHigh16IdStr, r'%s#%s#h' % (preStr, name))
modify = True
Log.d("change id from %s to name %s" % (matchHigh16IdStr, name))
if modify is True:
sf.seek(0, 0)
sf.truncate()
sf.write(fileStr)
sf.close()
class Log:
DEBUG = False
@staticmethod
def d(message):
if Log.DEBUG: print message
@staticmethod
def i(message):
print message
def main():
print "start change id to name...."
if len(sys.argv) == 3:
idtoname(sys.argv[1], sys.argv[2]).idtoname()
else:
print "USAGE: idtoname public.xml DIRECTORY"
print "eg: idtoname public.xml framework.jar.out"
print "change all of the id in framework.jar.out to type@name"
sys.exit(1)
print "change id to name done"
if __name__ == '__main__':
main()
| {
"content_hash": "69f269e7b60a960de7050e9670556445",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 120,
"avg_line_length": 35.21935483870968,
"alnum_prop": 0.5590767539842462,
"repo_name": "FlymeOS/tools",
"id": "a1bad378f03639430b7f48076adedee5c6b3cd6d",
"size": "5482",
"binary": false,
"copies": "2",
"ref": "refs/heads/marshmallow-6.0",
"path": "formatters/idtoname.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "16339"
},
{
"name": "Python",
"bytes": "384840"
},
{
"name": "Shell",
"bytes": "86936"
}
],
"symlink_target": ""
} |
import os
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
import tensorflow as tf
import matplotlib as mpl
import pandas as pd
import numpy as np
mpl.use('Agg')
import time
import matplotlib.pyplot as plt
def convolve_inner_layers(x, W, b):
'''
    inner layers of network --- relu activation
'''
y = tf.nn.conv2d(x, W, strides = [1,1,1,1], padding='VALID')
y = tf.nn.bias_add(y, b)
return tf.nn.relu(y)
def convolve_ouput_layer(x, W, b):
'''
output layer of network --- linear activation
'''
y = tf.nn.conv2d(x, W, strides = [1,1,1,1], padding='VALID')
y = tf.nn.bias_add(y, b)
return y
def conv_net(x, W, b):
'''
    entire conv net. each layer feeds into the following layer, and the outputs of all layers are concatenated and fed to the output layer
'''
conv1 = convolve_inner_layers(x, W['weights1'], b['bias1'])
conv2 = convolve_inner_layers(conv1, W['weights2'], b['bias2'])
conv3 = convolve_inner_layers(conv2, W['weights3'], b['bias3'])
conv4 = convolve_inner_layers(conv3, W['weights4'], b['bias4'])
output_feed = tf.concat([conv1, conv2, conv3, conv4],3)
output = convolve_ouput_layer(output_feed, W['weights_out'], b['bias_out'])
return output
def get_variance(training_target):
'''
returns variance of the target data. used in normalizing the error.
'''
all_pixels = training_target.flatten()
return all_pixels.var()
def normalize_input(train_data, test_data):
'''
    normalizing input across each pixel and each channel (i.e. normalize each input to the network).
'''
mean, std_dev = np.mean(train_data, axis=0), np.std(train_data, axis=0)
return (train_data - mean) / std_dev, (test_data - mean) / std_dev
def get_epoch(x, y, n):
'''
    splits the entire data set into an epoch of minibatches of size n. returns a dict with the key being the
    minibatch number and the value being a length-2 list with the features at the first index and
    the targets at the second.
'''
input_size = x.shape[0]
number_batches = input_size // n
extra_examples = input_size % n
batches = {}
batch_indices = np.arange(input_size)
np.random.shuffle(batch_indices)
for i in range(number_batches):
temp_indices = batch_indices[n*i:n*(i+1)]
temp_x = []
temp_y = []
for j in temp_indices:
temp_x.append(x[j])
temp_y.append(y[j])
batches[i] = [np.asarray(temp_x), np.asarray(temp_y)]
if extra_examples != 0:
extra_indices = batch_indices[input_size-extra_examples:input_size]
temp_x = []
temp_y = []
for k in extra_indices:
temp_x.append(x[k])
temp_y.append(y[k])
batches[i+1] = [np.asarray(temp_x), np.asarray(temp_y)]
return batches
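# A minimal usage sketch for get_epoch (never called; the array shapes below
# are placeholder values chosen only to illustrate the returned structure).
def _example_get_epoch_usage():
    features = np.random.rand(10, 96, 96, 4)   # 10 fake input images
    targets = np.random.rand(10, 86, 86, 1)    # 10 fake target maps
    batches = get_epoch(features, targets, 4)
    # keys 0 and 1 hold full minibatches of 4; key 2 holds the 2 leftover examples
    for batch_number, (x_batch, y_batch) in batches.items():
        print('batch {}: features {}, targets {}'.format(
            batch_number, x_batch.shape, y_batch.shape))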
def main():
print('welcome to luminance net.')
# parameters
filter_dim, filter_dim2 = 11, 1
batch_size = 4
image_dim, result_dim = 96, 86
input_layer, first_layer, second_layer, third_layer, fourth_layer, output_layer = 4, 100, 50, 25, 10, 1
learning_rate = .0001
epochs = 5000
# data input
data_path = 'https://raw.githubusercontent.com/michaelneuder/image_quality_analysis/master/data/sample_data/'
# train data --- 500 images, 96x96 pixels
orig_500 = pd.read_csv('{}orig_500.txt'.format(data_path), header=None, delim_whitespace = True)
recon_500 = pd.read_csv('{}recon_500.txt'.format(data_path), header=None, delim_whitespace = True)
# test data --- 140 images, 96x96 pixels
orig_140 = pd.read_csv('{}orig_140.txt'.format(data_path), header=None, delim_whitespace = True)
recon_140 = pd.read_csv('{}recon_140.txt'.format(data_path), header=None, delim_whitespace = True)
    # train target --- 500 images, 86x86 pixels (dimension reduction due to no zero padding being used)
luminance_500= pd.read_csv('{}luminance_500.csv'.format(data_path), header=None)
luminance_140 = pd.read_csv('{}luminance_140.csv'.format(data_path), header=None)
print('images loaded...')
# getting 4 input channels for train and test --- (orig, recon, orig squared, recon squared)
original_images_train = orig_500.values
original_images_train_sq = orig_500.values**2
reconstructed_images_train = recon_500.values
reconstructed_images_train_sq = recon_500.values**2
original_images_test = orig_140.values
original_images_test_sq = orig_140.values**2
reconstructed_images_test = recon_140.values
reconstructed_images_test_sq = recon_140.values**2
# stack inputs
training_input = np.dstack((original_images_train, reconstructed_images_train, original_images_train_sq, reconstructed_images_train_sq))
testing_input = np.dstack((original_images_test, reconstructed_images_test, original_images_test_sq, reconstructed_images_test_sq))
# normalize inputs
training_input_normalized, testing_input_normalized = normalize_input(training_input, testing_input)
# target values
training_target = luminance_500.values
testing_target = luminance_140.values
# get size of training and testing set
train_size = original_images_train.shape[0]
test_size = original_images_test.shape[0]
# reshaping features to (num images, 96x96, 4 channels)
train_features = np.reshape(training_input_normalized, [train_size,image_dim,image_dim,input_layer])
test_features = np.reshape(testing_input_normalized, [test_size,image_dim,image_dim,input_layer])
# reshaping target to --- (num images, 86x86, 1)
train_target = np.reshape(training_target, [train_size, result_dim, result_dim, output_layer])
test_target = np.reshape(testing_target, [test_size, result_dim, result_dim, output_layer])
# initializing filters, this is what we are trying to learn --- fan in
scaling_factor = 0.1
initializer = tf.contrib.layers.xavier_initializer()
weights = {
'weights1': tf.get_variable('weights1', [filter_dim,filter_dim,input_layer,first_layer], initializer=initializer),
'weights2': tf.get_variable('weights2', [filter_dim2,filter_dim2,first_layer,second_layer], initializer=initializer),
'weights3': tf.get_variable('weights3', [filter_dim2,filter_dim2,second_layer,third_layer], initializer=initializer),
'weights4': tf.get_variable('weights4', [filter_dim2,filter_dim2,third_layer,fourth_layer], initializer=initializer),
'weights_out': tf.get_variable('weights_out', [filter_dim2,filter_dim2,fourth_layer+third_layer+second_layer+first_layer,output_layer], initializer=initializer)
}
biases = {
'bias1': tf.get_variable('bias1', [first_layer], initializer=initializer),
'bias2': tf.get_variable('bias2', [second_layer], initializer=initializer),
'bias3': tf.get_variable('bias3', [third_layer], initializer=initializer),
'bias4': tf.get_variable('bias4', [fourth_layer], initializer=initializer),
'bias_out': tf.get_variable('bias_out', [output_layer], initializer=initializer)
}
# tensorflow setup
x = tf.placeholder(tf.float32, [None, image_dim, image_dim, input_layer])
y = tf.placeholder(tf.float32, [None, result_dim, result_dim, output_layer])
# model
prediction = conv_net(x, weights, biases)
# get variance to normalize error terms during training
variance = get_variance(train_target)
# loss and optimization
cost = tf.reduce_mean(tf.square(tf.subtract(prediction, y)))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
init = tf.global_variables_initializer()
# error arrays
training_error, testing_error = [], []
epoch_time = np.asarray([])
# tensorflow session & training
with tf.Session() as sess:
sess.run(init)
global_start_time = time.time()
print('starting training...')
for epoch_count in range(epochs):
start_time = time.time()
epoch = get_epoch(train_features, train_target, batch_size)
for i in epoch:
x_data_train, y_data_train = np.asarray(epoch[i][0]), np.asarray(epoch[i][1])
sess.run(optimizer, feed_dict={x : x_data_train, y : y_data_train})
train_loss = sess.run(cost, feed_dict={x : x_data_train, y : y_data_train})
training_error.append(100*train_loss/variance)
test_loss = sess.run(cost, feed_dict={x : test_features, y : test_target})
testing_error.append(100*test_loss/variance)
end_time = time.time()
epoch_time = np.append(epoch_time, end_time-start_time)
print('current epoch: {} -- '.format(epoch_count)
+'current train error: {:.4f} -- '.format(100*train_loss/variance)
+'average epoch time: {:.4}s '.format(epoch_time.mean()))
f, axarr = plt.subplots(nrows=1, ncols=1, figsize=(9,6))
axarr.plot(np.arange(epoch_count+1), training_error, label='train')
axarr.plot(np.arange(epoch_count+1), testing_error, label='test')
axarr.legend()
axarr.set_ylim(0,100)
plt.savefig('relu_1521_luminance.png')
print('training finished.')
if __name__ == '__main__':
main()
| {
"content_hash": "aaa5fc6ede4fb780d15c1901f668c2b0",
"timestamp": "",
"source": "github",
"line_count": 214,
"max_line_length": 168,
"avg_line_length": 42.9392523364486,
"alnum_prop": 0.6521928392643378,
"repo_name": "michaelneuder/image_quality_analysis",
"id": "621e33127269b775a58e7935762308b73e7bda76",
"size": "9212",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bin/nets/wip/ms_ssim_nets/luminance_net.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "6435505"
},
{
"name": "Matlab",
"bytes": "18698"
},
{
"name": "Python",
"bytes": "319607"
}
],
"symlink_target": ""
} |
import pymysql.cursors
connection = pymysql.connect(host="127.0.0.1", database="addressbook",
user="root", password="")
try:
cursor = connection.cursor()
cursor.execute("select * from group_list")
for row in cursor.fetchall():
print(row)
finally:
connection.close()
| {
"content_hash": "b87844f818fc2bdf4553ac5d3597acf5",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 70,
"avg_line_length": 27.416666666666668,
"alnum_prop": 0.601823708206687,
"repo_name": "eugene-petrash/address_book",
"id": "35dae4c9bd98f3c41b8bb0a3b312a0f06663eec5",
"size": "329",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "check_db_connection.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13021"
}
],
"symlink_target": ""
} |
import rexviewer as r
import circuits
import naali
import time
import sys
import traceback
user, pwd, server = "Test Bot", "test", "world.realxtend.org:9000"
#user, pwd, server = "d d", "d", "world.evocativi.com:8002"
class TestRunner(circuits.BaseComponent):
def __init__(self, cfsection=None):
self.config = cfsection or dict()
circuits.BaseComponent.__init__(self)
self.testgen = self.run()
@circuits.handler("on_sceneadded")
def sceneadded(self, name):
r.logInfo("base class sceneadded callback")
@circuits.handler("update")
def update(self, deltatime):
prev = None
try:
status = self.testgen.next()
except StopIteration:
# print "Test finished"
return
except:
r.logInfo(traceback.format_exc())
r.logInfo("unhandled exception in test")
r.logInfo("Test state: failure")
r.exit()
return
if status:
r.logInfo("Test state: " + str(status))
prev = status
#else:
#print "Test state", prev, "still running"
def timer_start(self):
self.timer_started = time.time()
def elapsed(self, n):
return (time.time() - self.timer_started) > n
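# A minimal sketch of an additional test written against the TestRunner
# protocol (illustrative only): run() must be a generator that yields status
# strings for logging, None while still waiting, and "success" or "failure"
# at the end.
class TestNoop(TestRunner):
    def run(self):
        yield "starting no-op test"
        self.timer_start()
        while not self.elapsed(1):
            yield None
        yield "success"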
class TestLoginLogoutExit(TestRunner):
def run(self):
self.wait_time = int(self.config.get("wait_time", 60))
yield "login"
self.timer_start()
r.startLoginOpensim(user, pwd, server)
print "ELAPSED:", self.elapsed(self.wait_time)
while not self.elapsed(self.wait_time):
#print "X",
yield None
yield "logging out"
self.timer_start()
r.logout()
while not self.elapsed(self.wait_time):
yield None
yield "exiting"
r.exit()
class TestCreateDestroy(TestRunner):
def __init__(self, *args, **kw):
self.finished = False
self.scene = None
self.select_test_ent = None
TestRunner.__init__(self, *args, **kw)
def run(self):
self.wait_time = int(self.config.get("wait_time", 60))
yield "doing login"
self.timer_start()
r.startLoginOpensim(user, pwd, server)
yield "waiting for connection"
conn = None
while not self.elapsed(self.wait_time):
conn = r.getServerConnection()
if conn and conn.IsConnected():
break
else:
yield None
else:
return
yield "waiting for scene"
while not self.scene and not self.elapsed(self.wait_time):
yield None
yield "waiting for Test_SelectTest"
while (not self.select_test_ent) and (not self.elapsed(self.wait_time)):
yield None
if self.select_test_ent:
self.add_extras(self.select_test_ent)
yield "added extras"
yield "done waiting for Test_SelectTest, continuing"
yield "creating object"
r.getServerConnection().SendObjectAddPacket(42, 42, 22)
yield "waiting for EntityCreated"
while (not self.finished) and (not self.elapsed(self.wait_time)):
yield None
yield "exiting"
r.exit()
if (not self.select_test_ent):
if self.finished:
yield "success"
else:
yield "failure"
@circuits.handler("on_sceneadded")
def sceneadded(self, name):
#r.logInfo("CreateDestroy sceneadded called")
self.scene = naali.getScene(name)
self.scene.connect("EntityCreated(Scene::Entity*, AttributeChange::Type)", self.handle_entity_created)
r.logInfo("EntityCreated callback registered")
@circuits.handler("on_exit")
def onexit(self):
#yield "TestCreateDestroy exiting"
r.logInfo("TestCreateDestroy exiting...")
self.delete_extras(self.select_test_ent)
r.logInfo("...done.")
#yield "exit ready. done"
# qt slot
def handle_entity_created(self, ent, changetype):
# fun fact: since we are called for every entity and
# self.finished checked only every "update" event,
# this often cleans up >1 test objects (in case any
# are left over from failed tests)
if ent.Id==2525429102:
r.logInfo("##### found entity I need")
self.select_test_ent = ent
try:
ec_netp = ent.network
except AttributeError:
if 0: print "skip entity without EC_NetworkPosition", dir(ent)
else:
netp = ec_netp.Position
# for some reason z coord ends up as 22.25
#r.logInfo("found entity with netpos %s %s %s" % (netp.x(), netp.y(), netp.z()))
if netp.x() == 42.0 and netp.y() == 42.0 and int(netp.z()) == 22:
r.logInfo("found created test prim - naming, moving and deleting (finished=%s)" % self.finished)
ent.prim.Name = "Seppo"
ent.prim.SendObjectNameUpdate()
pos = ent.placeable.position
pos.setX(netp.x() + 1) #change the x-coordinate
ent.placeable.position = pos
r.logInfo("Moving to move to pos: %s" % pos)
r.getServerConnection().SendObjectDeRezPacket(
ent.Id, r.getTrashFolderId())
self.finished = True
    # add_extras simulates object edit highlight and ruler usage
def add_extras(self, ent):
# add EC_Ruler
ruler = ent.GetOrCreateComponentRaw("EC_Ruler")
ruler.SetVisible(True)
#add highlight
try:
ent.highlight
except AttributeError:
ent.GetOrCreateComponentRaw("EC_Highlight")
h = ent.highlight
if not h.IsVisible():
h.Show()
def delete_extras(self, ent):
try:
h = ent.highlight
except AttributeError:
try:
r.logInfo(traceback.format_exc())
r.logInfo("removing highlight, but it doesn't exist anymore: %d" % ent.Id)
except:
r.logInfo(traceback.format_exc())
r.logInfo("Entity already removed")
else:
ent.RemoveComponentRaw(h)
r.logInfo("highlight removed")
try:
ruler = ent.ruler
except AttributeError:
try:
r.logInfo(traceback.format_exc())
r.logInfo("removing ruler, but it doesn't exist anymore: %d" % ent.Id)
except:
r.logInfo(traceback.format_exc())
r.logInfo("Entity already removed")
else:
ent.RemoveComponentRaw(ruler)
r.logInfo("ruler removed")
r.logInfo("Test state: success")
class TestDynamicProperties(TestRunner):
def __init__(self, *args, **kw):
self.scene = None
TestRunner.__init__(self, *args, **kw)
def run(self):
self.wait_time = int(self.config.get("wait_time", 60))
yield "doing login"
self.timer_start()
r.startLoginOpensim(user, pwd, server)
yield "waiting for connection"
conn = None
while not self.elapsed(self.wait_time):
conn = r.getServerConnection()
if conn and conn.IsConnected():
break
else:
yield None
else:
return
yield "waiting for avatar to appear"
ent = None
while not self.elapsed(self.wait_time):
try:
ent = naali.getUserAvatar()
except ValueError:
yield None
else:
break
if not ent:
yield "failure, avatar didn't appear"
return
print 'dynamic property stuff:'
ent.GetOrCreateComponentRaw("EC_DynamicComponent")
print ent, type(ent)
d = ent.qent.EC_DynamicComponent
val = 42.0
d.CreateAttribute("real", val)
d.ComponentChanged(0)
assert val == d.GetAttribute("real")
val = 8.5
d.SetAttribute("real", val)
d.ComponentChanged(0)
assert val == d.GetAttribute("real")
d.RemoveAttribute("real")
d.ComponentChanged(0)
yield "created, changed and removed attribute"
r.exit()
yield "success"
@circuits.handler("on_sceneadded")
def sceneadded(self, name):
#r.logInfo("CreateDestroy sceneadded called")
self.scene = naali.getScene(name)
class TestApi(TestRunner):
def __init__(self, *args, **kw):
self.scene = None
TestRunner.__init__(self, *args, **kw)
def run(self):
self.wait_time = int(self.config.get("wait_time", 60))
yield "doing login"
self.timer_start()
r.startLoginOpensim(user, pwd, server)
yield "waiting for connection"
conn = None
while not self.elapsed(self.wait_time):
conn = r.getServerConnection()
if conn and conn.IsConnected():
break
else:
yield None
else:
return
yield "waiting for avatar to appear"
ent = None
while not self.elapsed(self.wait_time):
try:
ent = naali.getUserAvatar()
except ValueError:
yield None
else:
break
if not ent:
yield "failure, avatar didn't appear"
return
for i in range(100):
for x in self.do_api_calls():
yield x
break
r.exit()
yield "success"
def do_api_calls(self):
yield "createMeshEntity"
e = naali.createMeshEntity("axes.mesh")
from PythonQt.QtGui import QVector3D, QQuaternion
e.placeable.position = QVector3D(128, 128, 60)
e.placeable.Scale = QVector3D(5, 5, 5)
e.placeable.Orientation = QQuaternion(0, 0, 0, 1)
r.removeEntity(e.Id)
yield "EC_Touchable & EC_Highlight"
for longname, shortname in [("EC_Touchable", 'touchable'), ("EC_Highlight", 'highlight')]:
e = naali.getUserAvatar()
e.GetOrCreateComponentRaw(longname)
x = getattr(e, shortname)
x.Show()
x.Hide()
assert x.IsVisible() == False
yield "naali.createEntity"
ent = naali.createEntity()
print "new entity created:", ent
yield "get camera FOV"
fov = naali.getCamera().camera.GetVerticalFov()
yield "avatar position"
p = naali.getUserAvatar().placeable.position
yield "avatar animation controller"
naali.getUserAvatar().animationcontroller.EnableAnimation("Walk")
yield "test sendChat"
r.sendChat("test chat")
yield "test logInfo"
r.logInfo("test log message")
#XXX deprecate
yield "test camera yaw/itch"
r.setCameraYawPitch(.1, .5)
r.getCameraYawPitch()
yield "test webview"
import PythonQt
wv = PythonQt.QtWebKit.QWebView()
wv.show()
yield "test dotscene loading"
from localscene import loader
loader.load_dotscene("pymodules/localscene/test.scene")
yield "test dynamic component"
ent = naali.getUserAvatar()
ent.GetOrCreateComponentRaw("EC_DynamicComponent")
print ent, type(ent)
d = ent.EC_DynamicComponent
d.CreateAttribute("real", 42.0)
d.ComponentChanged(0)
d.SetAttribute("real", 8.5)
d.ComponentChanged(0)
d.RemoveAttribute("real")
d.ComponentChanged(0)
yield "test javascript"
cam = naali.getCamera()
naali.runjs('print("Hello from JS! " + x)', {'x': naali.renderer})
naali.runjs('print("Another hello from JS! " + x)', {'x': naali.inputcontext})
naali.runjs('print("Some camera! " + x)', {'x': cam.camera})
#py objects are not qobjects. naali.runjs('print("Some camera, using naali :O ! " + x.getCamera())', {'x': naali})
naali.runjs('print("Camera Entity " + x)', {'x': cam})
naali.runjs('print("Camera placeable pos: " + pos)', {'pos': cam.placeable.position})
#not exposed yet. naali.runjs('print("QVector3D: " + new QVector3D())', {})
#naali.runjs('var a = {"a": true, "b": 2};')
#naali.runjs('print(a.a + ", " + a.b)')
#naali.runjs('print(JSON.stringify(a))')
#naali.runjs('print("1 + 1 == " + 1 + 1)')
#naali.runjs('print("1 - 1 == " + 1 - 1)')
print ", done."
if 0:
runjs('var b = new QPushButton;')
runjs('b.text = "hep";')
runjs('b.show();')
@circuits.handler("on_sceneadded")
def sceneadded(self, name):
#r.logInfo("CreateDestroy sceneadded called")
self.scene = naali.getScene(name)
| {
"content_hash": "da1c59add14a854a6bfcc6296b5d1bab",
"timestamp": "",
"source": "github",
"line_count": 402,
"max_line_length": 122,
"avg_line_length": 32.504975124378106,
"alnum_prop": 0.5548327848779367,
"repo_name": "antont/tundra",
"id": "1da9d43f709174e21de937a103278376b913d865",
"size": "13067",
"binary": false,
"copies": "1",
"ref": "refs/heads/tundra2",
"path": "src/Application/PythonScriptModule/pymodules_old/apitest/testrunner.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "110345"
},
{
"name": "C#",
"bytes": "76173"
},
{
"name": "C++",
"bytes": "4959154"
},
{
"name": "CoffeeScript",
"bytes": "2229"
},
{
"name": "JavaScript",
"bytes": "316308"
},
{
"name": "Objective-C",
"bytes": "222359"
},
{
"name": "Python",
"bytes": "999850"
},
{
"name": "Shell",
"bytes": "8224"
},
{
"name": "TypeScript",
"bytes": "230019"
}
],
"symlink_target": ""
} |
"""
korean.morphology.morpheme
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012-2013 by Heungsub Lee
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, unicode_literals
import sys
from ..hangul import get_final, is_hangul
__all__ = ['Morpheme']
class MorphemeMetaclass(type):
def __new__(meta, name, bases, attrs):
from . import Morphology
cls = type.__new__(meta, name, bases, attrs)
cls._registry = {}
Morphology._register_morpheme(cls)
return cls
def __call__(cls, *forms):
if len(forms) == 1:
try:
return cls.get(forms[0])
except KeyError:
pass
return super(MorphemeMetaclass, cls).__call__(*forms)
class Morpheme(object):
"""This class presents a morpheme (형태소) or allomorph (이형태). It
can have one or more forms. The first form means the basic allomorph
(기본형).
:param forms: each forms of allomorph. the first form will be basic
allomorph.
"""
__metaclass__ = MorphemeMetaclass
_registry = None
def __init__(self, *forms):
assert all([isinstance(form, unicode) for form in forms])
self.forms = forms
@classmethod
def get(cls, key):
"""Returns a pre-defined morpheme object by the given key."""
return cls._registry[key]
@classmethod
def register(cls, key, obj):
"""Registers a pre-defined morpheme object to the given key."""
cls._registry[key] = obj
def read(self):
"""Every morpheme class would implement this method. They should make a
morpheme to the valid Korean text with Hangul.
"""
return unicode(self)
def basic(self):
"""The basic form of allomorph."""
return self.forms[0]
def __unicode__(self):
return self.basic()
def __str__(self):
return unicode(self).encode('utf-8')
if sys.version_info >= (3,):
__str__ = __unicode__
del __unicode__
def __getitem__(self, i):
return unicode(self)[i]
def __getslice__(self, start, stop, step=None):
return unicode(self)[start:stop:step]
def __format__(self, suffix):
return '{0!s}{1}'.format(self, suffix)
def __repr__(self):
return '{0}({1!s})'.format(type(self).__name__, unicode(self))
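# A minimal usage sketch (not executed on import; the Hangul forms below are
# placeholders chosen only to illustrate the registry and allomorph behaviour).
def _example_morpheme_usage():
    m = Morpheme(u'은', u'는')   # the first form is the basic allomorph
    Morpheme.register(u'은', m)  # store it under a key in the class registry
    assert Morpheme.get(u'은') is m
    # calling the class with a single, already-registered key returns the
    # registered instance (see MorphemeMetaclass.__call__)
    assert Morpheme(u'은') is m
    assert m.basic() == u'은'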
| {
"content_hash": "8fabd33cbd62dd97c04f11a2dee3902d",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 79,
"avg_line_length": 26.119565217391305,
"alnum_prop": 0.5751144402829796,
"repo_name": "lqez/korean",
"id": "f7c0fe6be4a5e2253307ceaed44011b2a53b97c3",
"size": "2445",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "korean/morphology/morpheme.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "64071"
}
],
"symlink_target": ""
} |
from collections import defaultdict, namedtuple
from minecraft_data.v1_8 import recipes as raw_recipes
RecipeItem = namedtuple('RecipeItem', 'id meta amount')
class Recipe(object):
def __init__(self, raw):
self.result = reformat_item(raw['result'], None)
if 'ingredients' in raw:
self.ingredients = [reformat_item(item, 0)
for item in raw['ingredients']]
self.in_shape = None
self.out_shape = None
else:
self.in_shape = reformat_shape(raw['inShape'])
self.out_shape = reformat_shape(raw['outShape']) \
if 'outShape' in raw else None
self.ingredients = [item for row in self.in_shape for item in row]
@property
def total_ingredient_amounts(self):
"""
Returns:
dict: In the form { (item_id, metadata) -> amount }
"""
totals = defaultdict(int)
for id, meta, amount in self.ingredients:
totals[(id, meta)] += amount
return totals
@property
def ingredient_positions(self):
"""
Returns:
dict: In the form { (item_id, metadata) -> [(x, y, amount), ...] }
"""
positions = defaultdict(list)
for y, row in enumerate(self.in_shape):
for x, (item_id, metadata, amount) in enumerate(row):
positions[(item_id, metadata)].append((x, y, amount))
return positions
def reformat_item(raw, default_meta=None):
if isinstance(raw, dict):
raw = raw.copy() # do not modify arg
if 'metadata' not in raw:
raw['metadata'] = default_meta
if 'count' not in raw:
raw['count'] = 1
return RecipeItem(raw['id'], raw['metadata'], raw['count'])
elif isinstance(raw, list):
return RecipeItem(raw[0], raw[1], 1)
else: # single ID or None
return RecipeItem(raw or None, default_meta, 1)
def reformat_shape(shape):
return [[reformat_item(item, None) for item in row] for row in shape]
def iter_recipes(item_id, meta=None):
item_id = str(item_id)
meta = meta and int(meta)
try:
recipes_for_item = raw_recipes[item_id]
except KeyError:
return # no recipe found, do not yield anything
else:
for raw in recipes_for_item:
recipe = Recipe(raw)
if meta is None or meta == recipe.result.meta:
yield recipe
def get_any_recipe(item, meta=None):
# TODO return small recipes if present
for matching in iter_recipes(item, meta):
return matching
return None
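# A minimal usage sketch (never called on import). The item id below is an
# assumed example (58 is the pre-flattening id of the crafting table in 1.8);
# any id/metadata pair can be passed.
def _example_recipe_lookup(item_id=58, meta=None):
    recipe = get_any_recipe(item_id, meta)
    if recipe is None:
        print('no recipe found for item %s' % item_id)
        return
    # total_ingredient_amounts maps (item_id, metadata) -> required amount
    print('ingredients: %s' % dict(recipe.total_ingredient_amounts))
    if recipe.in_shape is not None:
        # ingredient_positions maps (item_id, metadata) -> [(x, y, amount), ...]
        print('positions: %s' % dict(recipe.ingredient_positions))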
| {
"content_hash": "423bb7ab2a729318828caeef949bf9b8",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 78,
"avg_line_length": 31.795180722891565,
"alnum_prop": 0.5767336112163698,
"repo_name": "Gjum/SpockBot",
"id": "5525b37d5e854b425684cc9cd0b1a4550af9f3c7",
"size": "2639",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "spockbot/mcdata/recipes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "290360"
}
],
"symlink_target": ""
} |
"""
Interface definition for notifiers.
"""
import datetime
import string
from abc import ABCMeta, abstractmethod
from collections import defaultdict
from ..exceptions import verify_type
__author__ = 'Aaron Hosford'
__all__ = [
'Notifier',
]
class Notifier(metaclass=ABCMeta):
"""
A notifier acts as a template for notifications, formatting the objects it is given into a
standardized template and sending the resulting notification on to a particular channel.
"""
@staticmethod
def interpolate(template, args, kwargs):
"""
Interpolate the given keyword arguments into a string template, handling the standard
notification parameters in a uniform way for all notifiers.
:param template: A string template.
:param args: The positional arguments to interpolate into the template.
:param kwargs: The keyword arguments to interpolate into the template.
:return: The interpolated string.
"""
verify_type(template, str)
verify_type(kwargs, (dict, defaultdict))
# TODO: Other defaults?
if 'time' not in kwargs:
kwargs.update(time=datetime.datetime.now())
# The defaultdict class doesn't work with str.format,
# so we have to parse it ourselves and add the keys.
formatter = string.Formatter()
for _, name, _, _ in formatter.parse(template):
if name and name not in kwargs:
kwargs[name] = None
# noinspection PyBroadException
try:
return template.format(*args, **kwargs)
except Exception:
# It's important that this never fails because it's used to report errors.
return template + ' (args: %r, kwargs: %r)' % (args, kwargs)
# TODO: Normalize this interface. It should take an optional message, then *args and **kwargs used for
    # interpolation, and finally a keyword-only optional attachments list. All subclasses and their uses will also
# have to be updated.
@abstractmethod
def __call__(self, *args, attachments=None, **kwargs):
"""
Send a notification on this notifier's channel.
:param attachments: The file attachments, if any, to include in the notification.
:return: None
"""
raise NotImplementedError()
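# A minimal sketch of a concrete notifier (illustrative only; it is not part
# of the real channel implementations). It simply prints the interpolated
# template, ignoring attachments.
class _PrintNotifier(Notifier):
    def __init__(self, template):
        self.template = template

    def __call__(self, *args, attachments=None, **kwargs):
        print(self.interpolate(self.template, args, kwargs))

# Example (hypothetical field names):
#     notifier = _PrintNotifier('[{time}] {task} finished with status {status}')
#     notifier(task='nightly load', status='OK')   # {time} is filled in automatically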
| {
"content_hash": "13e3d2bd9a6d2e33e3fc52d942a84c8e",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 119,
"avg_line_length": 32.3013698630137,
"alnum_prop": 0.6518235793044953,
"repo_name": "SaintAttila/attila",
"id": "092a67663b7ca1940d9de860959521aee9656ceb",
"size": "2358",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "attila/abc/notifications.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "571"
},
{
"name": "Python",
"bytes": "407601"
}
],
"symlink_target": ""
} |
import logging
import json
from webob import Response
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller import dpset
# from ryu.controller.handler import MAIN_DISPATCHER
from ryu.controller.handler import CONFIG_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_0
from ryu.ofproto import ofproto_v1_2
from ryu.ofproto import ofproto_v1_3
from ryu.lib import ofctl_v1_0
from ryu.lib import ofctl_v1_2
from ryu.lib import ofctl_v1_3
from ryu.app.wsgi import ControllerBase, WSGIApplication, route
import patch_ofc_flowbuilder
import patch_ofc_error
'''
"L1patch" OpenFlow controller based on "OFPatchPanel".
See also "OFPatchPanel" application.
nmasao/OFPatchPanel-SDNHackathon2014 - GitHub
https://github.com/nmasao/OFPatchPanel-SDNHackathon2014
'''
patch_instance_name = 'patch_app'
LOG = logging.getLogger('ryu.app.patch.patch_rest')
class PatchPanel(app_manager.RyuApp):
OFP_VERSIONS = [ofproto_v1_0.OFP_VERSION,
ofproto_v1_2.OFP_VERSION,
ofproto_v1_3.OFP_VERSION]
_CONTEXTS = {
'wsgi': WSGIApplication,
'dpset': dpset.DPSet}
def __init__(self, *args, **kwargs):
super(PatchPanel, self).__init__(*args, **kwargs)
self.dpset = kwargs['dpset']
wsgi = kwargs['wsgi']
wsgi.register(PatchController, {patch_instance_name: self})
self.patch_flows = [] # list of dict(flow)
@set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)
def switch_features_handler(self, ev):
datapath = ev.msg.datapath
message = 'connected datapath: dpid=%d' % datapath.id
LOG.info(message)
deny_any_flow = {
'match': {
# match any
},
'actions': [
# empty : action DROP
],
'priority': 0 # lowest priority
}
if not self._mod_patch_flow_entry(
datapath, deny_any_flow, datapath.ofproto.OFPFC_ADD):
msg = "DPID:%s, Cannot set default deny flow rule." % datapath.id
raise patch_ofc_error.PatchOfcError(msg)
def add_patch_flow(self, req_flow):
self._mod_patch_flow(req_flow, 'put')
def delete_patch_flow(self, req_flow):
self._mod_patch_flow(req_flow, 'delete')
def _mod_patch_flow(self, req_flow, command):
# check command
if command not in ['delete', 'put']:
LOG.error("Unknown command: %s" % command)
return Response(status=501)
# Check before send flow-mod
dpid = req_flow.get('dpid')
dp = self.dpset.get(dpid)
if dp is None:
LOG.error("Cannot find datapath-id:%s" % dpid)
return Response(status=400)
        # TODO: resource overlap check for exclusive-mode wire
# for flow in self.patch_flows:
# if dpid == flow['dpid'] and inport == flow['inport']:
# LOG.info('Requested inport is already used (dpid:%s, inport:%d)', dpid, inport)
# return Response(status=400)
try:
flow_rules = patch_ofc_flowbuilder.FlowRuleBuilder(dp, req_flow).build_flow()
for flow_rule in flow_rules:
print "--------------------------"
print "%s, dpid:%d (ofp_ver:%d)" % (
command.upper(), dpid, dp.ofproto.OFP_VERSION
)
print json.dumps(req_flow)
print json.dumps(flow_rule)
self._mod_patch_flow_entry(
dp, flow_rule, self._get_datapath_command(dp, command)
)
self._post_mod_patch_flow(req_flow, command)
print "--------------------------"
cors_headers = {'Access-Control-Allow-Origin': '*'}
            # Notice: any request will be accepted (status=200)
            # as long as the flow-mod can be sent to the OFS,
            # i.e. the request does not carry an invalid dpid or OFP version.
            # Whether the requested flow is correct/matching is not checked here.
return Response(status=200, headers=cors_headers)
except (patch_ofc_error.PatchOfcRestError,
patch_ofc_error.PatchOfcError) as err:
LOG.error(err.message)
return Response(status=501)
@staticmethod
def _get_datapath_command(dp, command):
if command == 'delete':
return dp.ofproto.OFPFC_DELETE
elif command == 'put':
return dp.ofproto.OFPFC_ADD
else:
msg = "Unknown command: %s" % command
raise patch_ofc_error.PatchOfcError(msg)
def _post_mod_patch_flow(self, req_flow, command):
if command == 'delete':
self._delete_from_patch_flows(req_flow)
elif command == 'put':
self.patch_flows.append(req_flow)
else:
msg = "Unknown command: %s" % command
raise patch_ofc_error.PatchOfcError(msg)
def _delete_from_patch_flows(self, req_flow):
# check each flows
req_flow_str = json.dumps(req_flow)
found_flow = None
for flow in self.patch_flows:
            # TODO: for now, use a simplified strict comparison.
            # Comparing complex nested dict/list data recursively is awkward,
            # so the flows are compared via their json.dumps string forms.
            # (Note: json.dumps does not sort keys unless sort_keys=True is passed.)
flow_str = json.dumps(flow)
if req_flow_str == flow_str:
found_flow = flow
break
if found_flow:
self.patch_flows.remove(found_flow)
def _mod_patch_flow_entry(self, dp, flow_rule, command):
if dp.ofproto.OFP_VERSION in self.OFP_VERSIONS:
if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
ofctl_v1_0.mod_flow_entry(dp, flow_rule, command)
elif dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
ofctl_v1_2.mod_flow_entry(dp, flow_rule, command)
elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
ofctl_v1_3.mod_flow_entry(dp, flow_rule, command)
return True
else:
msg = "Unsupported OFP version: %s" % dp.ofproto.OFP_VERSION
raise patch_ofc_error.PatchOfcError(msg)
def get_patch_flows(self):
body = json.dumps(self.patch_flows)
return Response(content_type='application/json',
body=body, status=200)
class PatchController(ControllerBase):
def __init__(self, req, link, data, **config):
super(PatchController, self).__init__(req, link, data, **config)
self.patch_app = data[patch_instance_name]
@route('patch', '/patch/flow', methods=['PUT'])
def add_patch_flow(self, req, **kwargs):
LOG.debug("start add_patch_flow")
patch = self.patch_app
try:
flow = eval(req.body)
except SyntaxError:
LOG.debug('invalid syntax %s', req.body)
return Response(status=400)
result = patch.add_patch_flow(flow)
return result
@route('patch', '/patch/flow', methods=['DELETE'])
def delete_patch_flow(self, req, **kwargs):
patch = self.patch_app
try:
flow = eval(req.body)
except SyntaxError:
LOG.debug('invalid syntax %s', req.body)
return Response(status=400)
result = patch.delete_patch_flow(flow)
return result
@route('patch', '/patch/flow', methods=['GET'])
def get_patch_flows(self, req, **kwargs):
patch = self.patch_app
result = patch.get_patch_flows()
return result
@route('patch', '/patch/flow', methods=['OPTIONS'])
def opts_patch_flows(self, req, **kwargs):
cors_headers = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'PUT, GET, DELETE, OPTIONS',
'Access-Control-Allow-Headers': 'Content-Type, Origin'
}
return Response(status=200, headers=cors_headers)
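# A rough usage sketch of the REST interface above (illustrative only; the
# controller is served on Ryu's wsgi port, 8080 by default, and the full flow
# body schema is defined by patch_ofc_flowbuilder, so only 'dpid' is shown):
#
#     PUT    /patch/flow   body: {"dpid": 1, ...}   -> install a patch flow
#     DELETE /patch/flow   body: {"dpid": 1, ...}   -> delete a patch flow
#     GET    /patch/flow                            -> list installed flows as JSON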
| {
"content_hash": "ec40c6454a9ee1038c29e428bbd332e5",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 97,
"avg_line_length": 37.65581395348837,
"alnum_prop": 0.5872035573122529,
"repo_name": "oolorg/ool-l1patch-dev",
"id": "b2175e6a84cc7d373d62db48aee5e857fa3ba3a6",
"size": "8096",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "patch_ofc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "86189"
}
],
"symlink_target": ""
} |
from collections import defaultdict
# 3p
from kafka import KafkaClient
from kafka.common import OffsetRequestPayload as OffsetRequest
from kazoo.client import KazooClient
from kazoo.exceptions import NoNodeError
# project
from checks import AgentCheck
DEFAULT_KAFKA_TIMEOUT = 5
DEFAULT_ZK_TIMEOUT = 5
class KafkaCheck(AgentCheck):
SOURCE_TYPE_NAME = 'kafka'
def __init__(self, name, init_config, agentConfig, instances=None):
AgentCheck.__init__(self, name, init_config, agentConfig, instances=instances)
self.zk_timeout = int(
init_config.get('zk_timeout', DEFAULT_ZK_TIMEOUT))
self.kafka_timeout = int(
init_config.get('kafka_timeout', DEFAULT_KAFKA_TIMEOUT))
def check(self, instance):
consumer_groups = self.read_config(instance, 'consumer_groups',
cast=self._validate_consumer_groups)
zk_connect_str = self.read_config(instance, 'zk_connect_str')
kafka_host_ports = self.read_config(instance, 'kafka_connect_str')
# Construct the Zookeeper path pattern
zk_prefix = instance.get('zk_prefix', '')
zk_path_tmpl = zk_prefix + '/consumers/%s/offsets/%s/%s'
# Connect to Zookeeper
zk_conn = KazooClient(zk_connect_str, timeout=self.zk_timeout)
zk_conn.start()
try:
# Query Zookeeper for consumer offsets
consumer_offsets = {}
topics = defaultdict(set)
for consumer_group, topic_partitions in consumer_groups.iteritems():
for topic, partitions in topic_partitions.iteritems():
                    # Remember the topic partitions that we've seen so that we can
# look up their broker offsets later
topics[topic].update(set(partitions))
for partition in partitions:
zk_path = zk_path_tmpl % (consumer_group, topic, partition)
try:
consumer_offset = int(zk_conn.get(zk_path)[0])
key = (consumer_group, topic, partition)
consumer_offsets[key] = consumer_offset
except NoNodeError:
self.log.warn('No zookeeper node at %s' % zk_path)
except Exception:
self.log.exception('Could not read consumer offset from %s' % zk_path)
finally:
try:
zk_conn.stop()
zk_conn.close()
except Exception:
self.log.exception('Error cleaning up Zookeeper connection')
# Connect to Kafka
kafka_conn = KafkaClient(kafka_host_ports, timeout=self.kafka_timeout)
try:
# Query Kafka for the broker offsets
broker_offsets = {}
for topic, partitions in topics.items():
offset_responses = kafka_conn.send_offset_request([
OffsetRequest(topic, p, -1, 1) for p in partitions])
for resp in offset_responses:
broker_offsets[(resp.topic, resp.partition)] = resp.offsets[0]
finally:
try:
kafka_conn.close()
except Exception:
self.log.exception('Error cleaning up Kafka connection')
# Report the broker data
for (topic, partition), broker_offset in broker_offsets.items():
broker_tags = ['topic:%s' % topic, 'partition:%s' % partition]
broker_offset = broker_offsets.get((topic, partition))
self.gauge('kafka.broker_offset', broker_offset, tags=broker_tags)
# Report the consumer
for (consumer_group, topic, partition), consumer_offset in consumer_offsets.items():
# Get the broker offset
broker_offset = broker_offsets.get((topic, partition))
# Report the consumer offset and lag
tags = ['topic:%s' % topic, 'partition:%s' % partition,
'consumer_group:%s' % consumer_group]
self.gauge('kafka.consumer_offset', consumer_offset, tags=tags)
self.gauge('kafka.consumer_lag', broker_offset - consumer_offset,
tags=tags)
def _validate_consumer_groups(self, val):
try:
consumer_group, topic_partitions = val.items()[0]
assert isinstance(consumer_group, (str, unicode))
topic, partitions = topic_partitions.items()[0]
assert isinstance(topic, (str, unicode))
assert isinstance(partitions, (list, tuple))
return val
except Exception as e:
self.log.exception(e)
raise Exception('''The `consumer_groups` value must be a mapping of mappings, like this:
consumer_groups:
myconsumer0: # consumer group name
mytopic0: [0, 1] # topic: list of partitions
myconsumer1:
mytopic0: [0, 1, 2]
mytopic1: [10, 12]
''')
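# A rough sketch of the instance configuration this check expects (illustrative
# only; hosts, group, topic and partition numbers are placeholders):
#
#     instances:
#       - kafka_connect_str: localhost:9092
#         zk_connect_str: localhost:2181
#         zk_prefix: /kafka            # optional chroot prefix
#         consumer_groups:
#           my_consumer:
#             my_topic: [0, 1, 2]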
| {
"content_hash": "1a6a7a467abf958addfa31d2f1d76f51",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 100,
"avg_line_length": 41.34710743801653,
"alnum_prop": 0.5818508894663202,
"repo_name": "StackVista/sts-agent-integrations-core",
"id": "cca44a003bcf50ec4c24ef0a2526c11dad087d38",
"size": "5013",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kafka_consumer/check.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "127"
},
{
"name": "Erlang",
"bytes": "15429"
},
{
"name": "Go",
"bytes": "1471"
},
{
"name": "Perl",
"bytes": "5845"
},
{
"name": "Python",
"bytes": "2138291"
},
{
"name": "Ruby",
"bytes": "169389"
},
{
"name": "Shell",
"bytes": "15492"
},
{
"name": "TSQL",
"bytes": "1239"
}
],
"symlink_target": ""
} |
from Foundation import NSObject
from AppKit import NSApp, NSWindow, NSPanel, NSScreen, NSWindowController, NSToolbar, NSToolbarItem, NSImage, NSNormalWindowLevel, NSFloatingWindowLevel, NSClosableWindowMask, NSMiniaturizableWindowMask, NSResizableWindowMask, NSTexturedBackgroundWindowMask, NSUnifiedTitleAndToolbarWindowMask, NSHUDWindowMask, NSUtilityWindowMask, NSTitledWindowMask, NSBorderlessWindowMask, NSBackingStoreBuffered, NSToolbarFlexibleSpaceItemIdentifier, NSToolbarSpaceItemIdentifier, NSToolbarSeparatorItemIdentifier, NSToolbarCustomizeToolbarItemIdentifier, NSToolbarPrintItemIdentifier, NSToolbarShowFontsItemIdentifier, NSToolbarShowColorsItemIdentifier, NSToolbarDisplayModeDefault, NSToolbarDisplayModeIconAndLabel, NSToolbarDisplayModeIconOnly, NSToolbarDisplayModeLabelOnly, NSToolbarSizeModeDefault, NSToolbarSizeModeRegular, NSToolbarSizeModeSmall
from objc import python_method
from vanilla.vanillaBase import _breakCycles, _calcFrame, _setAttr, _delAttr, _addAutoLayoutRules, _flipFrame, \
VanillaCallbackWrapper, VanillaError, VanillaWarning, VanillaBaseControl, \
osVersionCurrent, osVersion10_7, osVersion10_10, osVersion10_16
# PyObjC may not have these constants wrapped,
# so test and fallback if needed.
try:
from AppKit import NSWindowCollectionBehaviorFullScreenPrimary, NSWindowCollectionBehaviorFullScreenAuxiliary
except ImportError:
NSWindowCollectionBehaviorFullScreenPrimary = 1 << 7
NSWindowCollectionBehaviorFullScreenAuxiliary = 1 << 8
try:
from AppKit import NSWindowTitleVisible, NSWindowTitleHidden
except ImportError:
NSWindowTitleVisible = 0
NSWindowTitleHidden = 1
try:
# https://developer.apple.com/documentation/appkit/nswindowtoolbarstyle?language=objc
from AppKit import NSWindowToolbarStyleAutomatic, NSWindowToolbarStyleExpanded, NSWindowToolbarStylePreference, NSWindowToolbarStyleUnified, NSWindowToolbarStyleUnifiedCompact
except ImportError:
NSWindowToolbarStyleAutomatic = 0
NSWindowToolbarStyleExpanded = 1
NSWindowToolbarStylePreference = 2
NSWindowToolbarStyleUnified = 3
NSWindowToolbarStyleUnifiedCompact = 4
try:
from AppKit import NSFullSizeContentViewWindowMask
except ImportError:
NSFullSizeContentViewWindowMask = 1 << 15
class Window(NSObject):
"""
A window capable of containing controls.
.. image:: /_images/Window.png
To add a control to a window, simply set it as an attribute of the window.
::
from vanilla import Window, Button, TextBox
class WindowDemo:
def __init__(self):
self.w = Window((200, 70), "Window Demo")
self.w.myButton = Button((10, 10, -10, 20), "My Button")
self.w.myTextBox = TextBox((10, 40, -10, 17), "My Text Box")
self.w.open()
WindowDemo()
No special naming is required for the attributes. However, each attribute
must have a unique name.
**posSize** Tuple of form *(left, top, width, height)* representing the
position and size of the window. It may also be a tuple of form *(width, height)*.
In this case, the window will be positioned on screen automatically.
**title** The title to be set in the title bar of the window.
**minSize** Tuple of the form *(width, height)* representing the minimum size
that the window can be resized to.
**maxSize** Tuple of the form *(width, height)* representing the maximum size
that the window can be resized to.
**textured** Boolean value representing if the window should have a textured
appearance or not.
**autosaveName** A string representing a unique name for the window.
If given, this name will be used to store the window position and size in
the application preferences.
**closable** Boolean value representing if the window should have a
close button in the title bar.
**miniaturizable** Boolean value representing if the window should have a
minimize button in the title bar.
**initiallyVisible** Boolean value representing if the window will be
initially visible. Default is *True*. If *False*, you can show the window later
by calling `window.show()`.
**fullScreenMode** An indication of the full screen mode. These are the options:
+---------------+---------------------------------------------------------------+
| *None* | The window does not allow full screen. |
+---------------+---------------------------------------------------------------+
| *"primary"* | Corresponds to NSWindowCollectionBehaviorFullScreenPrimary. |
+---------------+---------------------------------------------------------------+
| *"auxiliary"* | Corresponds to NSWindowCollectionBehaviorFullScreenAuxiliary. |
+---------------+---------------------------------------------------------------+
**titleVisible** Boolean value indicating if the window title should be displayed.
**fullSizeContentView** Boolean value indicating if the content view should be the
full size of the window, including the area underneath the titlebar and toolbar.
**screen** A `NSScreen`_ object indicating the screen that the window should be
drawn to. When None the window will be drawn to the main screen.
.. _NSScreen: https://developer.apple.com/documentation/appkit/nsscreen?language=objc
"""
def __new__(cls, *args, **kwargs):
return cls.alloc().init()
nsWindowStyleMask = NSTitledWindowMask | NSUnifiedTitleAndToolbarWindowMask
nsWindowClass = NSWindow
nsWindowLevel = NSNormalWindowLevel
def __init__(self, posSize, title="", minSize=None, maxSize=None, textured=False,
autosaveName=None, closable=True, miniaturizable=True, initiallyVisible=True,
fullScreenMode=None, titleVisible=True, fullSizeContentView=False, screen=None):
mask = self.nsWindowStyleMask
if closable:
mask = mask | NSClosableWindowMask
if miniaturizable:
mask = mask | NSMiniaturizableWindowMask
if minSize or maxSize:
mask = mask | NSResizableWindowMask
if textured:
mask = mask | NSTexturedBackgroundWindowMask
if fullSizeContentView and osVersionCurrent >= osVersion10_10:
mask = mask | NSFullSizeContentViewWindowMask
# start the window
## too magical?
if len(posSize) == 2:
l = t = 100
w, h = posSize
cascade = True
else:
l, t, w, h = posSize
cascade = False
if screen is None:
screen = NSScreen.mainScreen()
frame = _calcFrame(screen.visibleFrame(), ((l, t), (w, h)))
self._window = self.nsWindowClass.alloc().initWithContentRect_styleMask_backing_defer_screen_(
frame, mask, NSBackingStoreBuffered, False, screen)
if autosaveName is not None:
# This also sets the window frame if it was previously stored.
# Make sure we do this before cascading.
self._window.setFrameAutosaveName_(autosaveName)
if cascade:
self._cascade()
if minSize is not None:
self._window.setMinSize_(minSize)
if maxSize is not None:
self._window.setMaxSize_(maxSize)
self._window.setTitle_(title)
self._window.setLevel_(self.nsWindowLevel)
self._window.setReleasedWhenClosed_(False)
self._window.setDelegate_(self)
self._autoLayoutViews = {}
self._bindings = {}
self._initiallyVisible = initiallyVisible
# full screen mode
if osVersionCurrent >= osVersion10_7:
if fullScreenMode is None:
pass
elif fullScreenMode == "primary":
self._window.setCollectionBehavior_(NSWindowCollectionBehaviorFullScreenPrimary)
elif fullScreenMode == "auxiliary":
self._window.setCollectionBehavior_(NSWindowCollectionBehaviorFullScreenAuxiliary)
# titlebar visibility
if osVersionCurrent >= osVersion10_10:
if not titleVisible:
self._window.setTitleVisibility_(NSWindowTitleHidden)
else:
self._window.setTitleVisibility_(NSWindowTitleVisible)
# full size content view
if fullSizeContentView and osVersionCurrent >= osVersion10_10:
self._window.setTitlebarAppearsTransparent_(True)
def _testForDeprecatedAttributes(self):
from warnings import warn
if hasattr(self, "_nsWindowStyleMask"):
warn(DeprecationWarning("The _nsWindowStyleMask attribute is deprecated. Use the nsWindowStyleMask attribute."))
self.nsWindowStyleMask = self._nsWindowStyleMask
if hasattr(self, "_nsWindowClass"):
warn(DeprecationWarning("The _nsWindowClass attribute is deprecated. Use the nsWindowClass attribute."))
self.nsWindowClass = self._nsWindowClass
if hasattr(self, "_nsWindowLevel"):
warn(DeprecationWarning("The _nsWindowLevel attribute is deprecated. Use the nsWindowLevel attribute."))
self.nsWindowLevel = self._nsWindowLevel
def _cascade(self):
allLeftTop = []
for other in NSApp().orderedWindows():
if other == self._window:
continue
(oL, oB), (oW, oH) = other.frame()
allLeftTop.append((oL, oB + oH))
(sL, sB), (sW, sH) = self._window.frame()
leftTop = sL, sB + sH
while leftTop in allLeftTop:
leftTop = self._window.cascadeTopLeftFromPoint_(leftTop)
self._window.setFrameTopLeftPoint_(leftTop)
def _breakCycles(self):
_breakCycles(self._window.contentView())
drawers = self._window.drawers()
if drawers is not None:
for drawer in drawers:
_breakCycles(drawer.contentView())
def _getContentView(self):
return self._window.contentView()
def __setattr__(self, attr, value):
_setAttr(Window, self, attr, value)
def __delattr__(self, attr):
_delAttr(Window, self, attr)
@python_method
def assignToDocument(self, document):
"""
Add this window to the list of windows associated with a document.
**document** should be a `NSDocument`_ instance.
.. _NSDocument: https://developer.apple.com/documentation/appkit/nsdocument?language=objc
"""
document.addWindowController_(self.getNSWindowController())
def getNSWindow(self):
"""
Return the `NSWindow`_ that this Vanilla object wraps.
.. _NSWindow: https://developer.apple.com/documentation/appkit/nswindow?language=objc
"""
return self._window
def getNSWindowController(self):
"""
Return an `NSWindowController`_ for the `NSWindow`_ that this Vanilla
object wraps, creating one if needed.
.. _NSWindowController: https://developer.apple.com/documentation/appkit/nswindowcontroller?language=objc
"""
controller = self._window.windowController()
if controller is None:
controller = NSWindowController.alloc().initWithWindow_(self._window)
return controller
def open(self):
"""
Open the window.
"""
if self._window is None:
raise ValueError("can't re-open a window")
if self._initiallyVisible:
self.show()
# We retain ourselves to ensure we don't go away, even if our
# caller doesn't keep a reference. It's balanced by a release
# in windowWillClose_().
self.retain()
self._validateMinMaxSize()
def _validateMinMaxSize(self):
# warn when the min size is bigger than the initial window size
# or when the max size is smaller than the initial window size
size = self._window.frame().size
minSize = self._window.minSize()
maxSize = self._window.maxSize()
if size.width < minSize.width or size.height < minSize.height:
from warnings import warn
warn("The windows `minSize` is bigger then the initial size.", VanillaWarning)
elif size.width > maxSize.width or size.height > maxSize.height:
from warnings import warn
warn("The windows `maxSize` is bigger then the initial size.", VanillaWarning)
def close(self):
"""
Close the window.
Once a window has been closed it can not be re-opened.
"""
if self._window.isSheet():
NSApp().endSheet_(self._window)
self._window.orderOut_(None)
self._window.close()
def hide(self):
"""
Hide the window.
"""
self._window.orderOut_(None)
def show(self):
"""
Show the window if it is hidden.
"""
self._window.makeKeyAndOrderFront_(None)
def makeKey(self):
"""
Make the window the key window.
"""
self._window.makeKeyWindow()
def makeMain(self):
"""
Make the window the main window.
"""
self._window.makeMainWindow()
@python_method
def setTitle(self, title):
"""
Set the title in the window's title bar.
**title** should be a string.
"""
self._window.setTitle_(title)
def getTitle(self):
"""
The title in the window's title bar.
"""
return self._window.title()
def select(self):
"""
Select the window if it is not the currently selected window.
"""
self._window.makeKeyWindow()
def isVisible(self):
"""
A boolean value representing if the window is visible or not.
"""
return self._window.isVisible()
def _calculateTitlebarHeight(self):
# Note: this will include the toolbar height if there is one
contentFrame = self._window.contentRectForFrameRect_(self._window.frame())
windowFrame = self._window.frame()
contentHeight = contentFrame.size[1]
windowHeight = windowFrame.size[1]
return windowHeight - contentHeight
def getPosSize(self):
"""
A tuple of form *(left, top, width, height)* representing the window's
position and size.
"""
frame = self._window.frame()
l, t, w, h = _flipFrame(self._window.screen().visibleFrame(), frame)
titlebarHeight = self._calculateTitlebarHeight()
t += titlebarHeight
h -= titlebarHeight
return (l, t, w, h)
@python_method
def setPosSize(self, posSize, animate=True):
"""
Set the position and size of the window.
**posSize** A tuple of form *(left, top, width, height)*.
"""
titlebarHeight = self._calculateTitlebarHeight()
l, t, w, h = posSize
t -= titlebarHeight
h += titlebarHeight
screenFrame = self._window.screen().visibleFrame()
# if the top is less than zero, force it to zero.
# otherwise the window will be thrown to the bottom
# of the screen.
if t < 0:
t = 0
# the screen frame could have a bottom
# value that is not zero. this will cause
# an error if (and only if) a window is
# being positioned at the top of the screen.
# so, adjust it.
(sL, sB), (sW, sH) = screenFrame
screenFrame = ((sL, 0), (sW, sH + sB))
frame = _calcFrame(screenFrame, ((l, t), (w, h)), absolutePositioning=True)
self._window.setFrame_display_animate_(frame, True, animate)
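# A minimal illustrative sketch (not part of the original docstring): getPosSize and
# setPosSize both use flipped, top-left-origin coordinates, so nudging an open
# vanilla window `w` down and to the right could look like this:
#
#     l, t, width, height = w.getPosSize()
#     w.setPosSize((l + 20, t + 20, width, height), animate=False)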
@python_method
def addAutoPosSizeRules(self, rules, metrics=None):
"""
Add auto layout rules for controls/view in this view.
**rules** must be a list of rule definitions.
Rule definitions may take two forms:
* strings that follow the `Visual Format Language`_
* dictionaries with the following key/value pairs:
+---------------------------+-------------------------------------------------------------+
| key | value |
+===========================+=============================================================+
| *"view1"* | The vanilla wrapped view for the left side of the rule. |
+---------------------------+-------------------------------------------------------------+
| *"attribute1"* | The attribute of the view for the left side of the rule. |
| | See below for options. |
+---------------------------+-------------------------------------------------------------+
| *"relation"* (optional) | The relationship between the left side of the rule |
| | and the right side of the rule. See below for options. |
| | The default value is `"=="`. |
+---------------------------+-------------------------------------------------------------+
| *"view2"* | The vanilla wrapped view for the right side of the rule. |
+---------------------------+-------------------------------------------------------------+
| *"attribute2"* | The attribute of the view for the right side of the rule. |
| | See below for options. |
+---------------------------+-------------------------------------------------------------+
| *"multiplier"* (optional) | The constant multiplied with the attribute on the right |
| | side of the rule as part of getting the modified attribute. |
| | The default value is `1`. |
+---------------------------+-------------------------------------------------------------+
| *"constant"* (optional) | The constant added to the multiplied attribute value on |
| | the right side of the rule to yield the final modified |
| | attribute. The default value is `0`. |
+---------------------------+-------------------------------------------------------------+
The `attribute1` and `attribute2` options are:
+-------------------+--------------------------------+
| value | AppKit equivalent |
+===================+================================+
| *"left"* | NSLayoutAttributeLeft |
+-------------------+--------------------------------+
| *"right"* | NSLayoutAttributeRight |
+-------------------+--------------------------------+
| *"top"* | NSLayoutAttributeTop |
+-------------------+--------------------------------+
| *"bottom"* | NSLayoutAttributeBottom |
+-------------------+--------------------------------+
| *"leading"* | NSLayoutAttributeLeading |
+-------------------+--------------------------------+
| *"trailing"* | NSLayoutAttributeTrailing |
+-------------------+--------------------------------+
| *"width"* | NSLayoutAttributeWidth |
+-------------------+--------------------------------+
| *"height"* | NSLayoutAttributeHeight |
+-------------------+--------------------------------+
| *"centerX"* | NSLayoutAttributeCenterX |
+-------------------+--------------------------------+
| *"centerY"* | NSLayoutAttributeCenterY |
+-------------------+--------------------------------+
| *"baseline"* | NSLayoutAttributeBaseline |
+-------------------+--------------------------------+
| *"lastBaseline"* | NSLayoutAttributeLastBaseline |
+-------------------+--------------------------------+
| *"firstBaseline"* | NSLayoutAttributeFirstBaseline |
+-------------------+--------------------------------+
Refer to the `NSLayoutAttribute documentation`_ for the information
about what each of these do.
The `relation` options are:
+--------+------------------------------------+
| value | AppKit equivalent |
+========+====================================+
| *"<="* | NSLayoutRelationLessThanOrEqual |
+--------+------------------------------------+
| *"=="* | NSLayoutRelationEqual |
+--------+------------------------------------+
| *">="* | NSLayoutRelationGreaterThanOrEqual |
+--------+------------------------------------+
Refer to the `NSLayoutRelation documentation`_ for the information
about what each of these do.
**metrics** may be either **None** or a dict containing
key value pairs representing metrics keywords used in the
rules defined with strings.
.. _Visual Format Language: https://developer.apple.com/library/archive/documentation/UserExperience/Conceptual/AutolayoutPG/VisualFormatLanguage.html#//apple_ref/doc/uid/TP40010853-CH27-SW1
.. _NSLayoutAttribute documentation: https://developer.apple.com/documentation/uikit/nslayoutattribute?language=objc
.. _NSLayoutRelation documentation: https://developer.apple.com/documentation/uikit/nslayoutrelation?language=objc
"""
_addAutoLayoutRules(self, rules, metrics)
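# Illustrative sketch of the two rule forms documented above. `w.buttonA` and
# `w.buttonB` are hypothetical controls already set as attributes of the window,
# and "padding" is a made-up metric name:
#
#     rules = [
#         # Visual Format Language string that uses the "padding" metric
#         "H:|-padding-[buttonA]-[buttonB]-padding-|",
#         # dictionary form: give both buttons the same width
#         dict(view1=w.buttonA, attribute1="width",
#              relation="==",
#              view2=w.buttonB, attribute2="width",
#              multiplier=1, constant=0),
#     ]
#     w.addAutoPosSizeRules(rules, metrics={"padding": 12})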
def center(self):
"""
Center the window within the screen.
"""
self._window.center()
@python_method
def move(self, x, y, animate=True):
"""
Move the window by **x** units and **y** units.
"""
(l, b), (w, h) = self._window.frame()
l = l + x
b = b - y
self._window.setFrame_display_animate_(((l, b), (w, h)), True, animate)
@python_method
def resize(self, width, height, animate=True):
"""
Change the size of the window to **width** and **height**.
"""
l, t, w, h = self.getPosSize()
self.setPosSize((l, t, width, height), animate)
@python_method
def setDefaultButton(self, button):
"""
Set the default button in the window.
**button** will be bound to the Return and Enter keys.
"""
if not isinstance(button, VanillaBaseControl):
raise VanillaError("invalid object")
cell = button._nsObject.cell()
self._window.setDefaultButtonCell_(cell)
@python_method
def bind(self, event, callback):
"""
Bind a callback to an event.
**event** A string representing the desired event. The options are:
+-------------------+----------------------------------------------------------------------+
| *"should close"* | Called when the user attempts to close the window. This must return |
| | a bool indicating if the window should be closed or not. |
+-------------------+----------------------------------------------------------------------+
| *"close"* | Called immediately before the window closes. |
+-------------------+----------------------------------------------------------------------+
| *"move"* | Called immediately after the window is moved. |
+-------------------+----------------------------------------------------------------------+
| *"resize"* | Called immediately after the window is resized. |
+-------------------+----------------------------------------------------------------------+
| *"became main"* | Called immediately after the window has become the main window. |
+-------------------+----------------------------------------------------------------------+
| *"resigned main"* | Called immediately after the window has lost its main window status. |
+-------------------+----------------------------------------------------------------------+
| *"became key"* | Called immediately after the window has become the key window. |
+-------------------+----------------------------------------------------------------------+
| *"resigned key"* | Called immediately after the window has lost its key window status. |
+-------------------+----------------------------------------------------------------------+
*For more information about main and key windows, refer to the Cocoa `documentation`_
on the subject.*
.. _documentation: https://developer.apple.com/documentation/Cocoa/Conceptual/WinPanel/Concepts/ChangingMainKeyWindow.html
**callback** The callback that will be called when the event occurs.
It should accept a *sender* argument which will be the Window that called the callback.
::
from vanilla import Window
class WindowBindDemo:
def __init__(self):
self.w = Window((200, 200))
self.w.bind("move", self.windowMoved)
self.w.open()
def windowMoved(self, sender):
print("window moved!", sender)
WindowBindDemo()
"""
if event not in self._bindings:
self._bindings[event] = []
self._bindings[event].append(callback)
@python_method
def unbind(self, event, callback):
"""
Unbind a callback from an event.
**event** A string representing the desired event.
Refer to *bind* for the options.
**callback** The callback that has been bound to the event.
"""
self._bindings[event].remove(callback)
@python_method
def _alertBindings(self, key):
# test to see if the attr exists.
# this is necessary because NSWindow
# can move the window (and therefore
# call the delegate method which calls
# this method) before the super
# call in __init__ is complete.
returnValues = []
if hasattr(self, "_bindings"):
if key in self._bindings:
for callback in self._bindings[key]:
value = callback(self)
if value is not None:
# eliminate None return values
returnValues.append(value)
return all(returnValues)
def windowWillClose_(self, notification):
self.hide()
self._alertBindings("close")
# remove all bindings to prevent circular refs
if hasattr(self, "_bindings"):
del self._bindings
self._breakCycles()
# We must make sure that the window does _not_ get deallocated during
# windowWillClose_, or weird things happen, such as the window below this
# one not always getting activated properly. (For reference:
# this happens when closing with cmd-W, but not when clicking the close
# control.)
# Yet we want to get rid of the NSWindow object here, mostly as a flag
# so we can disallow re-opening windows. So we retain/autorelease the
# NSWindow, then get rid of our own reference.
self._window.retain()
self._window.autorelease()
self._window = None # make sure we can't re-open the window
self.autorelease() # see self.open()
def windowDidBecomeKey_(self, notification):
self._alertBindings("became key")
def windowDidResignKey_(self, notification):
self._alertBindings("resigned key")
def windowDidBecomeMain_(self, notification):
self._alertBindings("became main")
def windowDidResignMain_(self, notification):
self._alertBindings("resigned main")
def windowDidMove_(self, notification):
self._alertBindings("move")
def windowDidResize_(self, notification):
self._alertBindings("resize")
def windowDidEnterFullScreen_(self, notification):
self._alertBindings("enter full screen")
def windowWillEnterFullScreen_(self, notification):
self._alertBindings("will enter full screen")
def windowDidExitFullScreen_(self, notification):
self._alertBindings("exit full screen")
def windowWillExitFullScreen_(self, notification):
self._alertBindings("will exit full screen")
def windowShouldClose_(self, notification):
shouldClose = self._alertBindings("should close")
if shouldClose is None:
shouldClose = True
return shouldClose
# -------
# Toolbar
# -------
# credit where credit is due: much of this was learned
# from the PyObjC demo: WSTConnectionWindowControllerClass
@python_method
def setToolbarStyle(self, toolbarStyle):
"""
Set a toolbar style for the window.
**toolbarStyle** A string representing the desired toolbar style
+------------------+
| "default" |
+------------------+
| "expanded" |
+------------------+
| "preference" |
+------------------+
| "unified" |
+------------------+
| "unifiedCompact" |
+------------------+
"""
if osVersionCurrent >= osVersion10_16:
toolbarStyleMap = dict(
default=NSWindowToolbarStyleAutomatic,
expanded=NSWindowToolbarStyleExpanded,
preference=NSWindowToolbarStylePreference,
unified=NSWindowToolbarStyleUnified,
unifiedCompact=NSWindowToolbarStyleUnifiedCompact,
)
self._window.setToolbarStyle_(toolbarStyleMap[toolbarStyle])
@python_method
def addToolbar(self, toolbarIdentifier, toolbarItems, addStandardItems=True, displayMode="default", sizeStyle="default", toolbarStyle="default"):
"""
Add a toolbar to the window.
**toolbarIdentifier** A string representing a unique name for the toolbar.
**toolbarItems** An ordered list of dictionaries containing the following items:
+-------------------------------+---------------------------------------------------------------------------+
| *itemIdentifier* | A unique string identifier for the item. This is only used internally. |
+-------------------------------+---------------------------------------------------------------------------+
| *label* (optional) | The text label for the item. Defaults to *None*. |
+-------------------------------+---------------------------------------------------------------------------+
| *paletteLabel* (optional) | The text label shown in the customization palette. Defaults to *label*. |
+-------------------------------+---------------------------------------------------------------------------+
| *toolTip* (optional) | The tool tip for the item. Defaults to *label*. |
+-------------------------------+---------------------------------------------------------------------------+
| *imagePath* (optional) | A file path to an image. Defaults to *None*. |
+-------------------------------+---------------------------------------------------------------------------+
| *imageNamed* (optional) | The name of an image already loaded as a `NSImage`_ by the application. |
| | Defaults to *None*. |
+-------------------------------+---------------------------------------------------------------------------+
| *imageObject* (optional) | A `NSImage`_ object. Defaults to *None*. |
+-------------------------------+---------------------------------------------------------------------------+
| *imageTemplate* (optional)    | A boolean indicating whether the image should be converted to a template. |
+-------------------------------+---------------------------------------------------------------------------+
| *selectable* (optional) | A boolean representing if the item is selectable or not. The default |
| | value is _False_. For more information on selectable toolbar items, refer |
| | to Apple's documentation. |
+-------------------------------+---------------------------------------------------------------------------+
| *view* (optional) | A `NSView`_ object to be used instead of an image. Defaults to *None*. |
+-------------------------------+---------------------------------------------------------------------------+
| *visibleByDefault* (optional) | If the item should be visible by default pass True to this argument. |
| | If the item should be added to the toolbar only through the customization |
| | palette, use a value of _False_. Defaults to _True_. |
+-------------------------------+---------------------------------------------------------------------------+
.. _NSImage: https://developer.apple.com/documentation/appkit/nsimage?language=objc
**addStandardItems** A boolean, specifying whether the standard Cocoa toolbar items
should be added. Defaults to *True*. If you set it to *False*, you must specify any
standard items manually in *toolbarItems*, by using the constants from the AppKit module:
+-------------------------------------------+----------------------------------------------------------------+
| *NSToolbarSeparatorItemIdentifier* | The Separator item. |
+-------------------------------------------+----------------------------------------------------------------+
| *NSToolbarSpaceItemIdentifier* | The Space item. |
+-------------------------------------------+----------------------------------------------------------------+
| *NSToolbarFlexibleSpaceItemIdentifier* | The Flexible Space item. |
+-------------------------------------------+----------------------------------------------------------------+
| *NSToolbarShowColorsItemIdentifier* | The Colors item. Shows the color panel. |
+-------------------------------------------+----------------------------------------------------------------+
| *NSToolbarShowFontsItemIdentifier* | The Fonts item. Shows the font panel. |
+-------------------------------------------+----------------------------------------------------------------+
| *NSToolbarCustomizeToolbarItemIdentifier* | The Customize item. Shows the customization palette. |
+-------------------------------------------+----------------------------------------------------------------+
| *NSToolbarPrintItemIdentifier* | The Print item. Refer to Apple's *NSToolbarItem* documentation |
| | for more information. |
+-------------------------------------------+----------------------------------------------------------------+
**displayMode** A string representing the desired display mode for the toolbar.
+-------------+
| "default" |
+-------------+
| "iconLabel" |
+-------------+
| "icon" |
+-------------+
| "label" |
+-------------+
**sizeStyle** A string representing the desired size for the toolbar
+-----------+
| "default" |
+-----------+
| "regular" |
+-----------+
| "small" |
+-----------+
**toolbarStyle** A string representing the desired toolbar style
+------------------+
| "default" |
+------------------+
| "expanded" |
+------------------+
| "preference" |
+------------------+
| "unified" |
+------------------+
| "unifiedCompact" |
+------------------+
Returns a dictionary containing the created toolbar items, mapped by itemIdentifier.
"""
STANDARD_TOOLBAR_ITEMS = [
NSToolbarFlexibleSpaceItemIdentifier,
NSToolbarSpaceItemIdentifier,
NSToolbarSeparatorItemIdentifier,
NSToolbarCustomizeToolbarItemIdentifier,
NSToolbarPrintItemIdentifier,
NSToolbarShowFontsItemIdentifier,
NSToolbarShowColorsItemIdentifier,
]
# create the reference structures
self._toolbarItems = {}
self._toolbarDefaultItemIdentifiers = []
self._toolbarAllowedItemIdentifiers = []
self._toolbarCallbackWrappers = {}
self._toolbarSelectableItemIdentifiers = []
# create the toolbar items
for itemData in toolbarItems:
self._createToolbarItem(itemData)
if addStandardItems:
for standardItem in STANDARD_TOOLBAR_ITEMS:
if standardItem not in self._toolbarAllowedItemIdentifiers:
self._toolbarAllowedItemIdentifiers.append(standardItem)
# create the toolbar
toolbar = NSToolbar.alloc().initWithIdentifier_(toolbarIdentifier)
toolbar.setDelegate_(self)
toolbar.setAllowsUserCustomization_(True)
toolbar.setAutosavesConfiguration_(True)
displayModeMap = dict(
default=NSToolbarDisplayModeDefault,
iconLabel=NSToolbarDisplayModeIconAndLabel,
icon=NSToolbarDisplayModeIconOnly,
label=NSToolbarDisplayModeLabelOnly,
)
toolbar.setDisplayMode_(displayModeMap[displayMode])
sizeStyleMap = dict(
default=NSToolbarSizeModeDefault,
regular=NSToolbarSizeModeRegular,
small=NSToolbarSizeModeSmall
)
toolbar.setSizeMode_(sizeStyleMap[sizeStyle])
self.setToolbarStyle(toolbarStyle)
self._window.setToolbar_(toolbar)
# Return the dict of toolbar items, so our caller can choose to
# keep references to them if needed.
return self._toolbarItems
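# Illustrative sketch of the item dictionaries documented above. The identifiers,
# labels, callback, and image values are hypothetical:
#
#     def saveCallback(sender):
#         print("save hit")
#
#     items = [
#         dict(itemIdentifier="save",
#              label="Save",
#              imagePath="/path/to/save.png",   # placeholder path
#              callback=saveCallback),
#         dict(itemIdentifier="info",
#              label="Info",
#              imageNamed="appInfoIcon",        # a name the app registered with NSImage
#              selectable=True),
#     ]
#     toolbarItems = w.addToolbar("DemoToolbar", items, displayMode="icon")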
def getToolbarItems(self):
if hasattr(self, "_toolbarItems"):
return self._toolbarItems
return {}
@python_method
def addToolbarItem(self, itemData, index=None):
"""
Add a toolbar item to the window's toolbar.
**itemData** An item description with the same format as a toolbarItem description in `addToolbar`.
**index** An integer, specifying the place to insert the toolbar itemIdentifier.
"""
if not hasattr(self, "_toolbarItems"):
raise VanillaError("window has not toolbar")
itemIdentifier = itemData.get("itemIdentifier")
self._createToolbarItem(itemData)
if itemData.get("visibleByDefault", True):
if index is not None:
self._toolbarDefaultItemIdentifiers.remove(itemIdentifier)
self._toolbarDefaultItemIdentifiers.insert(index, itemIdentifier)
index = self._toolbarDefaultItemIdentifiers.index(itemIdentifier)
self._window.toolbar().insertItemWithItemIdentifier_atIndex_(itemIdentifier, index)
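# Sketch: inserting a hypothetical item at the front of an existing toolbar,
# with `reloadCallback` defined elsewhere:
#
#     w.addToolbarItem(dict(itemIdentifier="reload", label="Reload",
#                           callback=reloadCallback), index=0)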
@python_method
def removeToolbarItem(self, itemIdentifier):
"""
Remove a toolbar item by its identifier.
**itemIdentifier** The unique string identifier of the item to remove.
"""
if not hasattr(self, "_toolbarItems"):
raise VanillaError("window has not toolbar")
if itemIdentifier not in self._toolbarItems:
raise VanillaError("itemIdentifier %r not in toolbar" % itemIdentifier)
item = self._toolbarItems[itemIdentifier]
toolbarItems = self._window.toolbar().items()
if item in toolbarItems:
# the item may be missing if the user customized the toolbar manually
index = toolbarItems.indexOfObject_(item)
self._window.toolbar().removeItemAtIndex_(index)
self._toolbarAllowedItemIdentifiers.remove(itemIdentifier)
self._toolbarDefaultItemIdentifiers.remove(itemIdentifier)
del self._toolbarItems[itemIdentifier]
@python_method
def _createToolbarItem(self, itemData):
itemIdentifier = itemData.get("itemIdentifier")
if itemIdentifier is None:
raise VanillaError("toolbar item data must contain a unique itemIdentifier string")
if itemIdentifier in self._toolbarItems:
raise VanillaError("toolbar itemIdentifier is not unique: %r" % itemIdentifier)
if itemIdentifier not in self._toolbarAllowedItemIdentifiers:
self._toolbarAllowedItemIdentifiers.append(itemIdentifier)
if itemData.get("visibleByDefault", True):
self._toolbarDefaultItemIdentifiers.append(itemIdentifier)
if itemIdentifier.startswith("NS"):
# no need to create an actual item for a standard Cocoa toolbar item
return
label = itemData.get("label")
paletteLabel = itemData.get("paletteLabel", label)
toolTip = itemData.get("toolTip", label)
imagePath = itemData.get("imagePath")
imageNamed = itemData.get("imageNamed")
imageObject = itemData.get("imageObject")
imageTemplate = itemData.get("imageTemplate")
view = itemData.get("view")
callback = itemData.get("callback", None)
# create the NSImage if needed
if imagePath is not None:
image = NSImage.alloc().initWithContentsOfFile_(imagePath)
elif imageNamed is not None:
image = NSImage.imageNamed_(imageNamed)
elif imageObject is not None:
image = imageObject
else:
image = None
toolbarItem = NSToolbarItem.alloc().initWithItemIdentifier_(itemIdentifier)
toolbarItem.setLabel_(label)
toolbarItem.setPaletteLabel_(paletteLabel)
toolbarItem.setToolTip_(toolTip)
if image is not None:
if imageTemplate is not None:
# only change the image template setting if it's explicitly True or False
image.setTemplate_(imageTemplate)
toolbarItem.setImage_(image)
elif view is not None:
toolbarItem.setView_(view)
toolbarItem.setMinSize_(view.frame().size)
toolbarItem.setMaxSize_(view.frame().size)
if callback is not None:
target = VanillaCallbackWrapper(callback)
toolbarItem.setTarget_(target)
toolbarItem.setAction_("action:")
self._toolbarCallbackWrappers[itemIdentifier] = target
if itemData.get("selectable", False):
self._toolbarSelectableItemIdentifiers.append(itemIdentifier)
self._toolbarItems[itemIdentifier] = toolbarItem
# Toolbar delegate methods
def toolbarDefaultItemIdentifiers_(self, anIdentifier):
return self._toolbarDefaultItemIdentifiers
def toolbarAllowedItemIdentifiers_(self, anIdentifier):
return self._toolbarAllowedItemIdentifiers
def toolbar_itemForItemIdentifier_willBeInsertedIntoToolbar_(self, toolbar, itemIdentifier, flag):
return self._toolbarItems.get(itemIdentifier)
def toolbarSelectableItemIdentifiers_(self, toolbar):
return self._toolbarSelectableItemIdentifiers
class FloatingWindow(Window):
"""
A window that floats above all other windows.
.. image:: /_images/FloatingWindow.png
To add a control to a window, simply set it as an attribute of the window.
::
from vanilla import FloatingWindow, Button, TextBox
class FloatingWindowDemo:
def __init__(self):
self.w = FloatingWindow((200, 70), "FloatingWindow Demo")
self.w.myButton = Button((10, 10, -10, 20), "My Button")
self.w.myTextBox = TextBox((10, 40, -10, 17), "My Text Box")
self.w.open()
FloatingWindowDemo()
No special naming is required for the attributes. However, each attribute
must have a unique name.
**posSize** Tuple of form *(left, top, width, height)* representing the position
and size of the window. It may also be a tuple of form *(width, height)*.
In this case, the window will be positioned on screen automatically.
**title** The title to be set in the title bar of the window.
**minSize** Tuple of the form *(width, height)* representing the minimum size
that the window can be resized to.
**maxSize** Tuple of the form *(width, height)* representing the maximum size
that the window can be resized to.
**textured** Boolean value representing if the window should have a textured
appearance or not.
**autosaveName** A string representing a unique name for the window. If given,
this name will be used to store the window position and size in the application
preferences.
**closable** Boolean value representing if the window should have a close button
in the title bar.
**screen** A `NSScreen`_ object indicating the screen that the window
should be drawn to. When None the window will be drawn to the main screen.
.. _NSScreen: https://developer.apple.com/documentation/appkit/nsscreen?language=objc
"""
nsWindowStyleMask = NSTitledWindowMask | NSUtilityWindowMask
nsWindowClass = NSPanel
nsWindowLevel = NSFloatingWindowLevel
def __init__(self, posSize, title="", minSize=None, maxSize=None,
textured=False, autosaveName=None, closable=True,
initiallyVisible=True, screen=None):
super(FloatingWindow, self).__init__(posSize, title, minSize, maxSize,
textured, autosaveName, closable, initiallyVisible=initiallyVisible, screen=screen)
self._window.setBecomesKeyOnlyIfNeeded_(True)
def show(self):
"""
Show the window if it is hidden.
"""
# don't make key!
self._window.orderFront_(None)
class HUDFloatingWindow(FloatingWindow):
"""
A window that floats above all other windows and has the HUD appearance.
.. image:: /_images/HUDFloatingWindow.png
To add a control to a window, simply set it as an attribute of the window.
::
from vanilla import *
class HUDFloatingWindowDemo:
def __init__(self):
self.w = HUDFloatingWindow((200, 70), "HUDFloatingWindow Demo")
self.w.myButton = Button((10, 10, -10, 20), "My Button")
self.w.myTextBox = TextBox((10, 40, -10, 17), "My Text Box")
self.w.open()
HUDFloatingWindowDemo()
No special naming is required for the attributes. However, each attribute
must have a unique name.
**posSize** Tuple of form *(left, top, width, height)* representing the position
and size of the window. It may also be a tuple of form *(width, height)*.
In this case, the window will be positioned on screen automatically.
**title** The title to be set in the title bar of the window.
**minSize** Tuple of the form *(width, height)* representing the minimum size
that the window can be resized to.
**maxSize** Tuple of the form *(width, height)* representing the maximum size
that the window can be resized to.
**textured** Boolean value representing if the window should have a textured
appearance or not.
**autosaveName** A string representing a unique name for the window.
If given, this name will be used to store the window position and size in
the application preferences.
**closable** Boolean value representing if the window should have a close button
in the title bar.
**screen** A `NSScreen`_ object indicating the screen that the window
should be drawn to. When None the window will be drawn to the main screen.
.. _NSScreen: https://developer.apple.com/documentation/appkit/nsscreen?language=objc
"""
nsWindowStyleMask = NSHUDWindowMask | NSUtilityWindowMask | NSTitledWindowMask | NSBorderlessWindowMask
class Sheet(Window):
"""
A window that is attached to another window.
.. image:: /_images/Sheet.png
To add a control to a sheet, simply set it as an attribute of the sheet.::
from vanilla import Window, Sheet, Button
class SheetDemo:
def __init__(self):
self.w = Window((240, 140), "Sheet Demo")
self.w.openSheet = Button((10, -30, -10, 20),
"open sheet", callback=self.openSheetCallback)
self.w.open()
def openSheetCallback(self, sender):
self.sheet = Sheet((160, 70), self.w)
self.sheet.closeSheet = Button((10, -30, -10, 20),
"close sheet", callback=self.closeSheetCallback)
self.sheet.open()
def closeSheetCallback(self, sender):
self.sheet.close()
del self.sheet
SheetDemo()
No special naming is required for the attributes. However, each attribute
must have a unique name.
**posSize** Tuple of form *(width, height)* representing the size of the sheet.
**parentWindow** The window that the sheet should be attached to.
**minSize** Tuple of the form *(width, height)* representing the minimum size
that the sheet can be resized to.
**maxSize** Tuple of the form *(width, height)* representing the maximum size
that the sheet can be resized to.
**autosaveName** A string representing a unique name for the sheet. If given,
this name will be used to store the sheet size in the application preferences.
"""
def __init__(self, posSize, parentWindow, minSize=None, maxSize=None,
autosaveName=None):
if isinstance(parentWindow, Window):
parentWindow = parentWindow._window
self.parentWindow = parentWindow
textured = bool(parentWindow.styleMask() & NSTexturedBackgroundWindowMask)
super(Sheet, self).__init__(posSize, "", minSize, maxSize, textured,
autosaveName=autosaveName)
def open(self):
"""
Open the window.
"""
parentWindow = self.parentWindow
NSApp().beginSheet_modalForWindow_modalDelegate_didEndSelector_contextInfo_(
self._window, parentWindow, None, None, 0)
# See Window.open():
self.retain()
self._validateMinMaxSize()
| {
"content_hash": "16a71ab8a80fff5b30bd311a2e787144",
"timestamp": "",
"source": "github",
"line_count": 1166,
"max_line_length": 841,
"avg_line_length": 44.343910806174954,
"alnum_prop": 0.5449956483899042,
"repo_name": "typesupply/vanilla",
"id": "485c1729ae19f9a55eb5d4bfd8e82b9c3073f282",
"size": "51705",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Lib/vanilla/vanillaWindows.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "331750"
}
],
"symlink_target": ""
} |
import os
from pyface.confirmation_dialog import confirm
from pyface.constant import YES
from pyface.message_dialog import information
from pyface.tasks.action.task_action import TaskAction
from traitsui.menu import Action
from pychron.envisage.resources import icon
from pychron.envisage.ui_actions import UIAction, UITaskAction
class IdentifyPeaksDemoAction(TaskAction):
name = 'Id peaks'
method = 'identify_peaks'
class SavePipelineTemplateAction(TaskAction):
name = 'Save Pipeline Template'
method = 'save_pipeline_template'
class RunAction(TaskAction):
name = 'Run'
method = 'run'
image = icon('start')
visible_name = 'engine.run_enabled'
accelerator = 'Ctrl+R'
class ResumeAction(TaskAction):
name = 'Resume'
method = 'resume'
image = icon('edit-redo-3')
visible_name = 'engine.resume_enabled'
class RunFromAction(TaskAction):
name = 'Run From'
method = 'run_from'
image = icon('start')
class ResetAction(TaskAction):
name = 'Reset'
method = 'reset'
image = icon('arrow_refresh')
class ClearAction(TaskAction):
name = 'Clear'
method = 'clear'
image = icon('clear')
class SwitchToBrowserAction(TaskAction):
name = 'To Browser'
method = 'switch_to_browser'
image = icon('start')
class ConfigureRecallAction(UITaskAction):
name = 'Recall Configuration...'
method = 'configure_recall'
image = icon('cog')
# class ConfigureAnalysesTableAction(TaskAction):
# name = 'Configure Analyses Table'
# dname = 'Configure Analyses Table'
# method = 'configure_analyses_table'
# image = icon('cog')
#
#
# class ConfigureSampleTableAction(TaskAction):
# name = 'Configure Sample Table'
# dname = 'Configure Sample Table'
# method = 'configure_sample_table'
# image = icon('cog')
class LoadReviewStatusAction(TaskAction):
name = 'Review Status'
method = 'load_review_status'
image = icon('check_boxes')
class EditAnalysisAction(TaskAction):
name = 'Edit Analysis'
method = 'edit_analysis'
image = icon('application-form-edit')
class DiffViewAction(TaskAction):
name = 'Diff View'
method = 'diff_analysis'
image = icon('edit_diff')
enabled_name = 'diff_enabled'
class TabularViewAction(TaskAction):
name = 'Tabular View'
method = 'tabular_view'
image = icon('table')
class PipelineAction(UIAction):
def perform(self, event):
app = event.task.window.application
task = app.get_task('pychron.pipeline.task')
if hasattr(task, self.action):
getattr(task, self.action)()
class BrowserAction(Action):
_task_id = 'pychron.browser.task'
def perform(self, event):
task = self._get_task(event)
if hasattr(task, self.action):
getattr(task, self.action)()
def _get_task(self, event):
app = event.task.window.application
task = app.get_task(self._task_id)
return task
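# Note on the pattern below (inferred from this file): TaskAction subclasses name a
# `method` that is invoked on the active task, while PipelineAction/BrowserAction
# subclasses name an `action` that perform() looks up on the pipeline or browser
# task. Adding a new entry is therefore a small declarative class, e.g. with a
# hypothetical template name:
#
#     class CompositeAction(PipelineAction):
#         name = 'Composite'
#         action = 'set_composite_template'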
class RecallAction(PipelineAction):
name = 'Recall...'
action = 'pipeline_recall'
class InterpretedAgeRecallAction(PipelineAction):
name = 'Interpreted Age Recall...'
action = 'pipeline_interpreted_age_recall'
class TimeViewBrowserAction(BrowserAction):
name = 'Time View Recall...'
action = 'open_time_view_browser'
class ReductionAction(PipelineAction):
pass
class IsoEvolutionAction(PipelineAction):
name = 'Isotope Evolutions'
action = 'set_isotope_evolutions_template'
class BlanksAction(PipelineAction):
name = 'Blanks'
action = 'set_blanks_template'
class ICFactorAction(PipelineAction):
name = 'ICFactor'
action = 'set_icfactor_template'
class FluxAction(PipelineAction):
name = 'Flux'
action = 'set_flux_template'
class FreezeProductionRatios(PipelineAction):
name = 'Freeze Production Ratios'
action = 'freeze_production_ratios'
class FreezeFlux(PipelineAction):
name = 'Freeze Flux'
action = 'freeze_flux'
class AnalysisTableAction(PipelineAction):
name = 'Analysis Table'
action = 'set_analysis_table_template'
class PipelineRecallAction(TaskAction):
name = 'Recall'
method = 'pipeline_recall'
class ClearAnalysisSetsAction(UIAction):
name = 'Clear Analysis Sets'
def perform(self, event):
from pychron.paths import paths
p = paths.hidden_path('analysis_sets')
if os.path.isfile(p):
if confirm(None, 'Are you sure you want to clear the Analysis Sets?') == YES:
os.remove(p)
else:
information(None, 'No Analysis Sets to remove')
# ============= Plotting Actions =============================================
class ResetFactoryDefaultsAction(UIAction):
name = 'Reset Factory Defaults'
def perform(self, event):
from pychron.paths import paths
if confirm(None, 'Are you sure you want to reset to Factory Default settings?') == YES:
paths.reset_plot_factory_defaults()
class PlotAction(PipelineAction):
pass
class IdeogramAction(PlotAction):
name = 'Ideogram'
action = 'set_ideogram_template'
image = icon('histogram')
accelerator = 'Ctrl+i'
class SubgroupIdeogramAction(PlotAction):
name = 'SubGroup Ideogram'
action = 'set_subgroup_ideogram_template'
image = icon('histogram')
class HybridIdeogramAction(PlotAction):
name = 'Hybrid Ideogram'
action = 'set_hybrid_ideogram_template'
image = icon('histogram')
class HistoryIdeogramAction(PlotAction):
name = 'History Ideogram'
action = 'set_history_ideogram_template'
image = icon('histogram')
class SpectrumAction(PlotAction):
name = 'Spectrum'
action = 'set_spectrum_template'
accelerator = 'Ctrl+D'
# image = icon('histogram')
class IsochronAction(PlotAction):
name = 'Isochron'
action = 'set_isochron_template'
# image = icon('histogram')
class InverseIsochronAction(PlotAction):
name = 'InverseIsochron'
action = 'set_inverse_isochron_template'
class SeriesAction(PlotAction):
name = 'Series'
action = 'set_series_template'
id = 'pychron.series'
class VerticalFluxAction(PipelineAction):
name = 'Vertical Flux'
action = 'set_vertical_flux_template'
class ExtractionAction(UIAction):
name = 'Extraction Results...'
def perform(self, event):
app = event.task.window.application
windows = app.windows
for tid in ('pychron.browser.task', 'pychron.pipeline.task'):
for win in windows:
task = win.active_task
if task and task.id == tid:
getattr(task, 'show_extraction_graph')()
break
class MassSpecReducedAction(PipelineAction):
name = 'Mass Spec Reduced Transfer'
action = 'mass_spec_reduced_transfer'
# ============= Quick Series ====================================
# class LastNAnalysesSeriesAction(PipelineAction):
# name = 'Last N...'
# action = 'set_last_n_analyses_template'
#
#
# class LastNHoursSeriesAction(PipelineAction):
# name = 'Last N Hours...'
# action = 'set_last_n_hours_template'
#
#
# class LastDaySeriesAction(PipelineAction):
# name = 'Last Day'
# action = 'set_last_day_template'
#
#
# class LastWeekSeriesAction(PipelineAction):
# name = 'Last Week'
# action = 'set_last_week_template'
#
#
# class LastMonthSeriesAction(PipelineAction):
# name = 'Last Month'
# action = 'set_last_month_template'
# ============= tag =============================================
class TagAction(TaskAction):
name = 'Tag...'
dname = 'Tag'
# accelerator = 'Ctrl+Shift+t'
method = 'set_tag'
image = icon('tag-blue-add')
id = 'pychron.tag'
class SetInvalidAction(TaskAction):
name = 'Set Invalid'
method = 'set_invalid'
class SetFilteringTagAction(TaskAction):
name = 'Set Filtering Tag'
method = 'set_filtering_tag'
# ============= Interpreted Age =================================
class SetInterpretedAgeAction(TaskAction):
name = 'Set Interpreted Age'
method = 'set_interpreted_age'
enabled_name = 'set_interpreted_enabled'
class SavePDFAction(TaskAction):
name = 'Save PDF'
method = 'save_figure_pdf'
image = icon('file_pdf')
class SaveFigureAction(TaskAction):
name = 'Save Figure'
method = 'save_figure'
class SaveTableAction(TaskAction):
name = 'Save Table'
method = 'save_table'
image = icon('table_save')
enabled_name = 'set_interpreted_enabled'
# ============= EOF =============================================
| {
"content_hash": "78c5e0a2267737683d096c27d7d96374",
"timestamp": "",
"source": "github",
"line_count": 357,
"max_line_length": 89,
"avg_line_length": 24.0812324929972,
"alnum_prop": 0.6485983482610213,
"repo_name": "UManPychron/pychron",
"id": "9d06cb93b3e17eb33bb0d105efe4d1be63c8128b",
"size": "9397",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "pychron/pipeline/tasks/actions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "131"
},
{
"name": "C++",
"bytes": "3706"
},
{
"name": "CSS",
"bytes": "279"
},
{
"name": "Fortran",
"bytes": "455875"
},
{
"name": "HTML",
"bytes": "40346"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Processing",
"bytes": "11421"
},
{
"name": "Python",
"bytes": "10234954"
},
{
"name": "Shell",
"bytes": "10753"
}
],
"symlink_target": ""
} |
import operator
def create_summary(events, node_meta):
ob_types = node_meta['observation_types']
sensor_summaries = {ob_type: make_sensor_summary(ob_type, events)
for ob_type in ob_types}
return {
'version': node_meta['version'],
'sensors': sensor_summaries
}
def make_sensor_summary(ob_type, events):
get = operator.itemgetter(ob_type)
return {
'observations': [{'val': event[ob_type], 'time': event['time']}
for event in events],
'average': sum(map(get, events))/len(events)
}
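# Illustrative sketch of the expected shapes (field names taken from the code
# above, values made up):
#
#     node_meta = {'version': 1, 'observation_types': ['temperature']}
#     events = [
#         {'time': '2016-01-01T00:00:00', 'temperature': 20.0},
#         {'time': '2016-01-01T00:01:00', 'temperature': 22.0},
#     ]
#     create_summary(events, node_meta)
#     # -> {'version': 1,
#     #     'sensors': {'temperature': {'observations': [...], 'average': 21.0}}}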
| {
"content_hash": "313a2685ee095d8e018a6346686f8541",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 71,
"avg_line_length": 29.6,
"alnum_prop": 0.581081081081081,
"repo_name": "UrbanCCD-UChicago/plenario-stream",
"id": "2f191f635cceffc066673be90ed8e05759d7028f",
"size": "592",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "summarizer/materialize.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2356"
},
{
"name": "C",
"bytes": "4176736"
},
{
"name": "C++",
"bytes": "51877"
},
{
"name": "CSS",
"bytes": "1647"
},
{
"name": "Groff",
"bytes": "7236"
},
{
"name": "HTML",
"bytes": "285074"
},
{
"name": "JavaScript",
"bytes": "157601"
},
{
"name": "Jupyter Notebook",
"bytes": "2888"
},
{
"name": "Lua",
"bytes": "11887"
},
{
"name": "M4",
"bytes": "56418"
},
{
"name": "Makefile",
"bytes": "44329"
},
{
"name": "Perl",
"bytes": "175311"
},
{
"name": "Python",
"bytes": "15717"
},
{
"name": "Ruby",
"bytes": "70862"
},
{
"name": "Shell",
"bytes": "35336"
},
{
"name": "Smarty",
"bytes": "1047"
},
{
"name": "Tcl",
"bytes": "464534"
},
{
"name": "XSLT",
"bytes": "303"
}
],
"symlink_target": ""
} |
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.githubpages']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'hygrometry'
copyright = u'2016, Russell Valentine'
author = u'Russell Valentine'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'bizstyle'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'hygrometrydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'hygrometry.tex', u'hygrometry Documentation',
u'Russell Valentine', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'hygrometry', u'hygrometry Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'hygrometry', u'hygrometry Documentation',
author, 'hygrometry', 'One line description of project.',
'Miscellaneous'),
]
| {
"content_hash": "f2f71654d8968022402081fb24b49f3a",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 78,
"avg_line_length": 29.384615384615383,
"alnum_prop": 0.6625416468348405,
"repo_name": "bluthen/hygrometry",
"id": "174bfcf071c5e45953c13ad8cfff3118e6b28e6d",
"size": "4865",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/source/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11426"
}
],
"symlink_target": ""
} |
from typing import Callable, TYPE_CHECKING, Optional, Union
from kivy.app import App
from kivy.factory import Factory
from kivy.properties import ObjectProperty
from kivy.lang import Builder
from decimal import Decimal
from kivy.clock import Clock
from electrum_ltc.util import InvalidPassword
from electrum_ltc.gui.kivy.i18n import _
if TYPE_CHECKING:
from ...main_window import ElectrumWindow
from electrum_ltc.wallet import Abstract_Wallet
from electrum_ltc.storage import WalletStorage
Builder.load_string('''
<PasswordDialog@Popup>
id: popup
title: 'Vialectrum'
message: ''
basename:''
is_change: False
BoxLayout:
size_hint: 1, 1
orientation: 'vertical'
spacing: '12dp'
padding: '12dp'
BoxLayout:
size_hint: 1, None
orientation: 'horizontal'
height: '40dp'
Label:
size_hint: 0.85, None
height: '40dp'
font_size: '20dp'
text: _('Wallet') + ': ' + root.basename
text_size: self.width, None
IconButton:
size_hint: 0.15, None
height: '40dp'
icon: 'atlas://electrum_ltc/gui/kivy/theming/light/btn_create_account'
on_release: root.select_file()
disabled: root.is_change
opacity: 0 if root.is_change else 1
Widget:
size_hint: 1, 0.05
Label:
size_hint: 0.70, None
font_size: '20dp'
text: root.message
text_size: self.width, None
Widget:
size_hint: 1, 0.05
BoxLayout:
orientation: 'horizontal'
id: box_generic_password
size_hint_y: 0.05
height: '40dp'
TextInput:
height: '40dp'
id: textinput_generic_password
valign: 'center'
multiline: False
on_text_validate:
popup.on_password(self.text)
password: True
size_hint: 0.85, None
unfocus_on_touch: False
focus: True
IconButton:
height: '40dp'
size_hint: 0.15, None
icon: 'atlas://electrum_ltc/gui/kivy/theming/light/eye1'
icon_size: '40dp'
on_release:
textinput_generic_password.password = False if textinput_generic_password.password else True
Widget:
size_hint: 1, 1
<PincodeDialog@Popup>
id: popup
title: 'Vialectrum'
message: ''
basename:''
BoxLayout:
size_hint: 1, 1
orientation: 'vertical'
Widget:
size_hint: 1, 0.05
Label:
size_hint: 0.70, None
font_size: '20dp'
text: root.message
text_size: self.width, None
Widget:
size_hint: 1, 0.05
Label:
id: label_pin
size_hint_y: 0.05
font_size: '50dp'
text: '*'*len(kb.password) + '-'*(6-len(kb.password))
size: self.texture_size
Widget:
size_hint: 1, 0.05
GridLayout:
id: kb
size_hint: 1, None
height: self.minimum_height
update_amount: popup.update_password
password: ''
on_password: popup.on_password(self.password)
spacing: '2dp'
cols: 3
KButton:
text: '1'
KButton:
text: '2'
KButton:
text: '3'
KButton:
text: '4'
KButton:
text: '5'
KButton:
text: '6'
KButton:
text: '7'
KButton:
text: '8'
KButton:
text: '9'
KButton:
text: 'Clear'
KButton:
text: '0'
KButton:
text: '<'
''')
class AbstractPasswordDialog:
def init(self, app: 'ElectrumWindow', *,
check_password = None,
on_success: Callable = None, on_failure: Callable = None,
is_change: bool = False,
is_password: bool = True, # whether this is for a generic password or for a numeric PIN
has_password: bool = False,
message: str = '',
basename:str=''):
self.app = app
self.pw_check = check_password
self.message = message
self.on_success = on_success
self.on_failure = on_failure
self.success = False
self.is_change = is_change
self.pw = None
self.new_password = None
self.title = 'Vialectrum'
self.level = 1 if is_change and not has_password else 0
self.basename = basename
self.update_screen()
def update_screen(self):
self.clear_password()
if self.level == 0 and self.message == '':
self.message = self.enter_pw_message
elif self.level == 1:
self.message = self.enter_new_pw_message
elif self.level == 2:
self.message = self.confirm_new_pw_message
def check_password(self, password):
if self.level > 0:
return True
try:
self.pw_check(password)
return True
except InvalidPassword as e:
return False
def on_dismiss(self):
if self.level == 1 and self.allow_disable and self.on_success:
self.on_success(self.pw, None)
return False
if not self.success:
if self.on_failure:
self.on_failure()
else:
# keep dialog open
return True
else:
if self.on_success:
args = (self.pw, self.new_password) if self.is_change else (self.pw,)
Clock.schedule_once(lambda dt: self.on_success(*args), 0.1)
def update_password(self, c):
kb = self.ids.kb
text = kb.password
if c == '<':
text = text[:-1]
elif c == 'Clear':
text = ''
else:
text += c
kb.password = text
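# Flow note (inferred from do_check below): `level` tracks progress through a
# password-change dialog. Level 0 verifies the existing secret, level 1 asks for
# the new one, level 2 asks for its confirmation; a plain unlock dialog
# (is_change=False) finishes as soon as the level-0 check passes.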
def do_check(self, pw):
if self.check_password(pw):
if self.is_change is False:
self.success = True
self.pw = pw
self.message = _('Please wait...')
self.dismiss()
elif self.level == 0:
self.level = 1
self.pw = pw
self.update_screen()
elif self.level == 1:
self.level = 2
self.new_password = pw
self.update_screen()
elif self.level == 2:
self.success = pw == self.new_password
self.dismiss()
else:
self.app.show_error(self.wrong_password_message)
self.clear_password()
class PasswordDialog(AbstractPasswordDialog, Factory.Popup):
enter_pw_message = _('Enter your password')
enter_new_pw_message = _('Enter new password')
confirm_new_pw_message = _('Confirm new password')
wrong_password_message = _('Wrong password')
allow_disable = False
def clear_password(self):
self.ids.textinput_generic_password.text = ''
def on_password(self, pw: str):
# if setting new generic password, enforce min length
if self.level > 0:
if len(pw) < 6:
self.app.show_error(_('Password is too short (min {} characters)').format(6))
return
# don't enforce minimum length on existing
self.do_check(pw)
def select_file(self):
self.app.wallets_dialog()
class PincodeDialog(AbstractPasswordDialog, Factory.Popup):
enter_pw_message = _('Enter your PIN')
enter_new_pw_message = _('Enter new PIN')
confirm_new_pw_message = _('Confirm new PIN')
wrong_password_message = _('Wrong PIN')
allow_disable = True
def clear_password(self):
self.ids.kb.password = ''
def on_password(self, pw: str):
# PIN codes are exactly 6 chars
if len(pw) >= 6:
self.do_check(pw)
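# Minimal usage sketch (not part of this file): the dialogs are configured through
# init() rather than __init__. Assuming `app` is the running ElectrumWindow and
# `wallet` an open wallet, a generic password prompt might be driven like this:
#
#     d = PasswordDialog()
#     d.init(app,
#            check_password=wallet.check_password,
#            on_success=lambda pw: print("unlocked"),
#            on_failure=app.stop)
#     d.open()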
| {
"content_hash": "e151ae61ac0d9619b9aa56aa01765076",
"timestamp": "",
"source": "github",
"line_count": 273,
"max_line_length": 112,
"avg_line_length": 30.57875457875458,
"alnum_prop": 0.5103018687110685,
"repo_name": "vialectrum/vialectrum",
"id": "7e16de024816b608352ada6aeaa50c411b29fae7",
"size": "8348",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "electrum_ltc/gui/kivy/uix/dialogs/password_dialog.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "GLSL",
"bytes": "289"
},
{
"name": "Java",
"bytes": "1574"
},
{
"name": "Makefile",
"bytes": "839"
},
{
"name": "NSIS",
"bytes": "7496"
},
{
"name": "Python",
"bytes": "1895270"
},
{
"name": "Shell",
"bytes": "16219"
}
],
"symlink_target": ""
} |
from django.conf import settings
from django.conf.urls import url, include
from django.conf.urls.i18n import i18n_patterns
from django.conf.urls.static import static
from django.contrib import admin
from django.views import defaults as default_views
from django.views.generic import RedirectView, TemplateView
urlpatterns = [
url(r'^', include('apps.customer.urls')),
url(r'^favicon\.ico$', RedirectView.as_view(
url=settings.STATIC_URL + 'img/compressed/favicon.ico'),
name='favicon.ico'),
url(r'^robots\.txt$', TemplateView.as_view(
template_name='robots.txt', content_type='text/plain'),
name='robots.txt'),
url(r'^sitemap\.xml$', TemplateView.as_view(
template_name='sitemap.xml', content_type='text/xml'),
name='sitemap-xml'),
]
urlpatterns += i18n_patterns(
url(settings.ADMIN_URL, include(admin.site.urls)),
url(r'^administrator/', include('apps.dashboard.urls')),
# url(r'^chaining/', include('smart_selects.urls')),
)
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^400/$', default_views.bad_request,
kwargs={'exception': Exception("Bad Request!")}),
url(r'^403/$', default_views.permission_denied,
kwargs={'exception': Exception("Permission Denied")}),
url(r'^404/$', default_views.page_not_found,
kwargs={'exception': Exception("Page not Found")}),
url(r'^500/$', default_views.server_error),
url(r'^__debug__/', include(debug_toolbar.urls)),
]
| {
"content_hash": "d7b0da02099c02e12d53c82ec6bcc212",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 78,
"avg_line_length": 42.35,
"alnum_prop": 0.6694214876033058,
"repo_name": "jonaqp/heroku",
"id": "bd2e04be830c84ab4d8527c6796b5857d3333612",
"size": "1694",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "40949"
},
{
"name": "HTML",
"bytes": "329134"
},
{
"name": "JavaScript",
"bytes": "78825"
},
{
"name": "Python",
"bytes": "182554"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from django.forms import SelectMultiple
from django.db import models
from .models import Product, Tag
from categories.models import Category
class ProductAdmin(admin.ModelAdmin):
formfield_overrides = {
models.ManyToManyField: {'widget': SelectMultiple(attrs={'size':'20'})},
}
class TagAdmin(admin.ModelAdmin):
ordering = ['category__name', 'name', 'value']
admin.site.register(Tag, TagAdmin)
admin.site.register(Product, ProductAdmin)
| {
"content_hash": "d2acf4395271a34a8ccc694d18c95bb7",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 81,
"avg_line_length": 29.176470588235293,
"alnum_prop": 0.7459677419354839,
"repo_name": "giovannicode/djangoseller",
"id": "90bff380936326ef776df5cf66929fd1e748bef8",
"size": "496",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "products/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "14921"
},
{
"name": "HTML",
"bytes": "13745"
},
{
"name": "JavaScript",
"bytes": "5261"
},
{
"name": "Python",
"bytes": "64126"
},
{
"name": "Shell",
"bytes": "5094"
}
],
"symlink_target": ""
} |
import random
# ============= standard library imports ========================
from numpy import linspace
from numpy.core.umath import exp
# ============= enthought library imports =======================
from traitsui.api import View, Item, EnumEditor, Group, HGroup, spring, ButtonEditor
# ============= local library imports ==========================
from pychron.core.helpers.traitsui_shortcuts import okcancel_view
from pychron.spectrometer.jobs.sweep import BaseSweep
def multi_peak_generator(values):
for v in values:
m = 0.1
if 4.8 <= v <= 5.2:
m = 3
elif 5.5 <= v <= 5.8:
m = 9
elif 6.1 <= v <= 7:
m = 6
yield m + random.random() / 5.0
def pseudo_peak(center, start, stop, step, magnitude=500, peak_width=0.004, channels=1):
x = linspace(start, stop, step)
gaussian = lambda x: magnitude * exp(-(((center - x) / peak_width) ** 2))
for i, d in enumerate(gaussian(x)):
if abs(center - x[i]) < peak_width:
# d = magnitude
# for j in xrange(channels):
d = magnitude + magnitude / 50.0 * random.random()
yield [d * (j + 1) for j in range(channels)]
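# A minimal sketch of exercising the generators above on their own, assuming an
# interactive session; the numbers shown are illustrative, not measured values.
#
#     gen = multi_peak_generator(linspace(4.0, 7.5, 50))
#     next(gen)                        # ~0.1 off-peak, ~3/9/6 plus noise on the peaks
#     trace = [row[0] for row in pseudo_peak(center=5.0, start=4.5, stop=5.5, step=200)]
#     # `trace` is a noisy Gaussian peaked near 5.0 with the default magnitude of 500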
class AccelVoltageSweep(BaseSweep):
def _step(self, v):
self.spectrometer.source.nominal_hv = v
class MagnetSweep(BaseSweep):
_peak_generator = None
    def _make_pseudo(self, values, channels):
        self._peak_generator = pseudo_peak(
            values[len(values) // 2] + 0.001,
            values[0],
            values[-1],
            len(values),
            channels=channels,
        )
def _step_intensity(self):
if self._peak_generator:
resp = next(self._peak_generator)
else:
resp = super(MagnetSweep, self)._step_intensity()
return resp
def _step(self, v):
self.spectrometer.magnet.set_dac(
v,
verbose=self.verbose,
settling_time=0,
# settling_time=self.integration_time * 2,
use_dac_changed=False,
)
if hasattr(self.spectrometer, "trigger_acq"):
self.spectrometer.trigger_acq()
self.spectrometer.settle()
def _do_sweep(self, sm, em, stm, directions=None, map_mass=True):
if map_mass:
spec = self.spectrometer
mag = spec.magnet
detname = self.reference_detector.name
ds = spec.correct_dac(
self.reference_detector, mag.map_mass_to_dac(sm, detname)
)
de = spec.correct_dac(
self.reference_detector, mag.map_mass_to_dac(em, detname)
)
massdev = abs(sm - em)
dacdev = abs(ds - de)
stm = stm / float(massdev) * dacdev
sm, em = ds, de
return super(MagnetSweep, self)._do_sweep(sm, em, stm, directions)
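    # Worked example of the mass-to-DAC rescaling above (illustrative numbers only):
    # sweeping 39.0-41.0 amu in 0.1 amu steps with a mapped DAC span of 0.50 V gives
    # stm = 0.1 / 2.0 * 0.50 = 0.025 V per step, so the number of steps across the
    # sweep is preserved once sm/em have been converted from mass to DAC units.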
def edit_view(self):
v = okcancel_view(
Group(
Item("reference_detector", editor=EnumEditor(name="detectors")),
Item("integration_time", label="Integration (s)"),
label="Magnet Scan",
show_border=True,
),
title=self.title,
)
return v
def traits_view(self):
v = View(
Group(
Item("reference_detector", editor=EnumEditor(name="detectors")),
Item(
"start_value", label="Start Mass", tooltip="Start scan at this mass"
),
Item(
"stop_value",
label="Stop Mass",
tooltip="Stop scan when magnet reaches this mass",
),
Item(
"step_value",
label="Step Mass",
tooltip="Step from Start to Stop by this amount",
),
Item("integration_time", label="Integration (s)"),
HGroup(
spring,
Item(
"execute_button",
editor=ButtonEditor(label_value="execute_label"),
show_label=False,
),
),
label="Magnet Scan",
show_border=True,
)
)
return v
# ============= EOF =============================================
| {
"content_hash": "978c91de0826bd56da65a67ebad746ca",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 88,
"avg_line_length": 30.34931506849315,
"alnum_prop": 0.48408937034529453,
"repo_name": "USGSDenverPychron/pychron",
"id": "a54894b9a74df692bd466c3c81119f82031baf97",
"size": "5165",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "pychron/spectrometer/jobs/magnet_sweep.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "128"
},
{
"name": "C++",
"bytes": "3706"
},
{
"name": "CSS",
"bytes": "263"
},
{
"name": "Cython",
"bytes": "1692"
},
{
"name": "Fortran",
"bytes": "455875"
},
{
"name": "HTML",
"bytes": "46796"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Processing",
"bytes": "11421"
},
{
"name": "Python",
"bytes": "10773692"
},
{
"name": "Shell",
"bytes": "1003"
}
],
"symlink_target": ""
} |
from django.conf.urls import include, url
from django.contrib import admin
from account.views import LoginView, AboutView, TermsView, PrivacyView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^$', LoginView.as_view(), name='login'),
url(r'^about/$', AboutView.as_view(), name='about'),
url(r'^terms/$', TermsView.as_view(), name='terms'),
url(r'^privacy/$', PrivacyView.as_view(), name='privacy'),
url(r'^', include('account.urls', namespace='account'))
]
| {
"content_hash": "5a68c30ba58b830865b98cec0f871749",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 70,
"avg_line_length": 39.642857142857146,
"alnum_prop": 0.6594594594594595,
"repo_name": "ragnarok22/socialProject",
"id": "e781e68c4e3d4c58773294b7584564953ed094fb",
"size": "555",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "socialProject/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1106"
},
{
"name": "HTML",
"bytes": "13934"
},
{
"name": "JavaScript",
"bytes": "1508"
},
{
"name": "Python",
"bytes": "8267"
}
],
"symlink_target": ""
} |
import os
import logging
import six
import numpy as np
from .... import core
from ..core.strategy import Strategy
from ....log_helper import get_logger
__all__ = ['MKLDNNPostTrainingQuantStrategy']
_logger = get_logger(
__name__, logging.INFO, fmt='%(asctime)s-%(levelname)s: %(message)s')
class MKLDNNPostTrainingQuantStrategy(Strategy):
"""
The strategy for MKL-DNN Post Training quantization strategy.
"""
def __init__(self,
int8_model_save_path=None,
fp32_model_path=None,
cpu_math_library_num_threads=1):
"""
Args:
int8_model_save_path(str): int8_model_save_path is used to save an int8 ProgramDesc
with fp32 weights which is used for MKL-DNN int8 inference. For post training quantization,
MKLDNNPostTrainingQuantStrategy only supports converting a fp32 ProgramDesc
with fp32 weights to an int8 ProgramDesc with fp32 weights now. The saved
int8 ProgramDesc with fp32 weights only can be executed with MKL-DNN enabled.
None means it doesn't save int8 ProgramDesc with fp32 weights. default: None.
fp32_model_path(str): fp32_model_path is used to load an original fp32 ProgramDesc with fp32 weights.
None means it doesn't have a fp32 ProgramDesc with fp32 weights. default: None.
cpu_math_library_num_threads(int): The number of cpu math library threads which is used on
MKLDNNPostTrainingQuantStrategy. 1 means it only uses one cpu math library
thread. default: 1
"""
super(MKLDNNPostTrainingQuantStrategy, self).__init__(0, 0)
self.int8_model_save_path = int8_model_save_path
if fp32_model_path is None:
raise Exception("fp32_model_path is None")
self.fp32_model_path = fp32_model_path
self.cpu_math_library_num_threads = cpu_math_library_num_threads
def on_compression_begin(self, context):
"""
Prepare the data and quantify the model
"""
super(MKLDNNPostTrainingQuantStrategy,
self).on_compression_begin(context)
_logger.info('InferQuantStrategy::on_compression_begin')
# Prepare the Analysis Config
infer_config = core.AnalysisConfig("AnalysisConfig")
infer_config.switch_ir_optim(True)
infer_config.disable_gpu()
infer_config.set_model(self.fp32_model_path)
infer_config.enable_mkldnn()
infer_config.set_cpu_math_library_num_threads(
self.cpu_math_library_num_threads)
# Prepare the data for calculating the quantization scales
warmup_reader = context.eval_reader()
if six.PY2:
data = warmup_reader.next()
        elif six.PY3:
data = warmup_reader.__next__()
num_images = len(data)
image_data = [img.tolist() for (img, _) in data]
image_data = np.array(image_data).astype("float32").reshape(
[num_images, ] + list(data[0][0].shape))
image_data = image_data.ravel()
images = core.PaddleTensor(image_data, "x")
images.shape = [num_images, ] + list(data[0][0].shape)
label_data = [label for (_, label) in data]
labels = core.PaddleTensor(
np.array(label_data).astype("int64").reshape([num_images, 1]), "y")
warmup_data = [images, labels]
# Enable the INT8 Quantization
infer_config.enable_quantizer()
infer_config.quantizer_config().set_quant_data(warmup_data)
infer_config.quantizer_config().set_quant_batch_size(num_images)
# Run INT8 MKL-DNN Quantization
predictor = core.create_paddle_predictor(infer_config)
if self.int8_model_save_path:
if not os.path.exists(self.int8_model_save_path):
os.makedirs(self.int8_model_save_path)
predictor.SaveOptimModel(self.int8_model_save_path)
_logger.info(
            'Finish MKLDNNPostTrainingQuantStrategy::on_compression_begin')
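# A minimal usage sketch, assuming the surrounding slim compression framework
# constructs the strategy and later calls on_compression_begin(context) with a
# context whose eval_reader yields the warm-up batch; the paths are placeholders.
#
#     strategy = MKLDNNPostTrainingQuantStrategy(
#         int8_model_save_path='./int8_model',
#         fp32_model_path='./fp32_model',
#         cpu_math_library_num_threads=4)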
| {
"content_hash": "dfc75447e398db3f1da34e5cc204ca96",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 115,
"avg_line_length": 41.888888888888886,
"alnum_prop": 0.6245478659271763,
"repo_name": "chengduoZH/Paddle",
"id": "ad5ef33bf770395efd50fce06021e7ec7c4db4af",
"size": "4758",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/contrib/slim/quantization/mkldnn_post_training_strategy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "32490"
},
{
"name": "C++",
"bytes": "10146609"
},
{
"name": "CMake",
"bytes": "291349"
},
{
"name": "Cuda",
"bytes": "1192566"
},
{
"name": "Dockerfile",
"bytes": "10002"
},
{
"name": "Python",
"bytes": "7124331"
},
{
"name": "Ruby",
"bytes": "353"
},
{
"name": "Shell",
"bytes": "200906"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import sys
from os.path import abspath, dirname, join
# Make sure we get the version of this copy of Django
sys.path.insert(1, dirname(dirname(abspath(__file__))))
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(abspath(join(dirname(__file__), "_ext")))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ["djangodocs", "sphinx.ext.intersphinx"]
# Add any paths that contain templates here, relative to this directory.
# templates_path = []
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'contents'
# General substitutions.
project = 'Django'
copyright = 'Django Software Foundation and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.7'
# The full version, including alpha/beta/rc tags.
try:
from django import VERSION, get_version
except ImportError:
release = version
else:
def django_release():
pep386ver = get_version()
if VERSION[3:5] == ('alpha', 0) and 'dev' not in pep386ver:
return pep386ver + '.dev'
return pep386ver
release = django_release()
# The "development version" of Django
django_next_version = '1.7'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# Location for .po/.mo translation files used when language is set
locale_dirs = ['locale/']
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'trac'
# Links to Python's docs should reference the most recent version of the 2.x
# branch, which is located at this URL.
intersphinx_mapping = {
'python': ('http://docs.python.org/2.7', None),
'sphinx': ('http://sphinx.pocoo.org/', None),
'six': ('http://pythonhosted.org/six/', None),
'simplejson': ('http://simplejson.readthedocs.org/en/latest/', None),
}
# Python's docs don't change every week.
intersphinx_cache_limit = 90 # days
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "djangodocs"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_theme"]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# HTML translator class for the builder
html_translator_class = "djangodocs.DjangoHTMLTranslator"
# Content template for the index page.
#html_index = ''
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Djangodoc'
modindex_common_prefix = ["django."]
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
'preamble': ('\\DeclareUnicodeCharacter{2264}{\\ensuremath{\\le}}'
'\\DeclareUnicodeCharacter{2265}{\\ensuremath{\\ge}}')
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
#latex_documents = []
latex_documents = [
('contents', 'django.tex', 'Django Documentation',
'Django Software Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('contents', 'django', 'Django Documentation', ['Django Software Foundation'], 1)
]
# -- Options for Texinfo output ------------------------------------------------
# List of tuples (startdocname, targetname, title, author, dir_entry,
# description, category, toctree_only)
texinfo_documents = [(
master_doc, "django", "", "", "Django",
"Documentation of the Django framework", "Web development", False
)]
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = 'Django'
epub_author = 'Django Software Foundation'
epub_publisher = 'Django Software Foundation'
epub_copyright = '2010, Django Software Foundation'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
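# A typical build invocation for this configuration, assuming Sphinx >= 1.0 is
# installed and the working directory is the docs/ directory containing this file:
#
#     sphinx-build -b html -d _build/doctrees . _build/html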
| {
"content_hash": "ad452febf8bd945d6f22acc5ab1f8a97",
"timestamp": "",
"source": "github",
"line_count": 289,
"max_line_length": 85,
"avg_line_length": 32.32871972318339,
"alnum_prop": 0.6989189767740555,
"repo_name": "denisenkom/django",
"id": "0c35e935e6f014ffc1f835977d72fc7f04214be3",
"size": "9829",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "50381"
},
{
"name": "JavaScript",
"bytes": "100648"
},
{
"name": "Python",
"bytes": "8801295"
},
{
"name": "Shell",
"bytes": "12135"
}
],
"symlink_target": ""
} |
from boto.s3 import user
from boto.s3 import key
from boto import handler
import xml.sax
class CompleteMultiPartUpload(object):
"""
Represents a completed MultiPart Upload. Contains the
following useful attributes:
* location - The URI of the completed upload
* bucket_name - The name of the bucket in which the upload
is contained
* key_name - The name of the new, completed key
* etag - The MD5 hash of the completed, combined upload
* version_id - The version_id of the completed upload
* encrypted - The value of the encryption header
"""
def __init__(self, bucket=None):
self.bucket = bucket
self.location = None
self.bucket_name = None
self.key_name = None
self.etag = None
self.version_id = None
self.encrypted = None
def __repr__(self):
return '<CompleteMultiPartUpload: %s.%s>' % (self.bucket_name,
self.key_name)
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'Location':
self.location = value
elif name == 'Bucket':
self.bucket_name = value
elif name == 'Key':
self.key_name = value
elif name == 'ETag':
self.etag = value
else:
setattr(self, name, value)
class Part(object):
"""
Represents a single part in a MultiPart upload.
Attributes include:
* part_number - The integer part number
* last_modified - The last modified date of this part
* etag - The MD5 hash of this part
* size - The size, in bytes, of this part
"""
def __init__(self, bucket=None):
self.bucket = bucket
self.part_number = None
self.last_modified = None
self.etag = None
self.size = None
def __repr__(self):
if isinstance(self.part_number, int):
return '<Part %d>' % self.part_number
else:
return '<Part %s>' % None
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'PartNumber':
self.part_number = int(value)
elif name == 'LastModified':
self.last_modified = value
elif name == 'ETag':
self.etag = value
elif name == 'Size':
self.size = int(value)
else:
setattr(self, name, value)
def part_lister(mpupload, part_number_marker=None):
"""
A generator function for listing parts of a multipart upload.
"""
more_results = True
part = None
while more_results:
parts = mpupload.get_all_parts(None, part_number_marker)
for part in parts:
yield part
part_number_marker = mpupload.next_part_number_marker
more_results = mpupload.is_truncated
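# part_lister is normally consumed indirectly through MultiPartUpload.__iter__ below;
# a direct sketch, assuming `mp` is an in-progress MultiPartUpload:
#
#     for part in part_lister(mp):
#         print('%d %s %s' % (part.part_number, part.size, part.etag))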
class MultiPartUpload(object):
"""
Represents a MultiPart Upload operation.
"""
def __init__(self, bucket=None):
self.bucket = bucket
self.bucket_name = None
self.key_name = None
        self.id = None
self.initiator = None
self.owner = None
self.storage_class = None
self.initiated = None
self.part_number_marker = None
self.next_part_number_marker = None
self.max_parts = None
self.is_truncated = False
self._parts = None
def __repr__(self):
return '<MultiPartUpload %s>' % self.key_name
def __iter__(self):
return part_lister(self)
def to_xml(self):
s = '<CompleteMultipartUpload>\n'
for part in self:
s += ' <Part>\n'
s += ' <PartNumber>%d</PartNumber>\n' % part.part_number
s += ' <ETag>%s</ETag>\n' % part.etag
s += ' </Part>\n'
s += '</CompleteMultipartUpload>'
return s
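    # For a two-part upload the document produced above looks like:
    #
    #   <CompleteMultipartUpload>
    #     <Part>
    #       <PartNumber>1</PartNumber>
    #       <ETag><etag-of-part-1></ETag>
    #     </Part>
    #     <Part>
    #       <PartNumber>2</PartNumber>
    #       <ETag><etag-of-part-2></ETag>
    #     </Part>
    #   </CompleteMultipartUpload>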
def startElement(self, name, attrs, connection):
if name == 'Initiator':
self.initiator = user.User(self)
return self.initiator
elif name == 'Owner':
self.owner = user.User(self)
return self.owner
elif name == 'Part':
part = Part(self.bucket)
self._parts.append(part)
return part
return None
def endElement(self, name, value, connection):
if name == 'Bucket':
self.bucket_name = value
elif name == 'Key':
self.key_name = value
elif name == 'UploadId':
self.id = value
elif name == 'StorageClass':
self.storage_class = value
elif name == 'PartNumberMarker':
self.part_number_marker = value
elif name == 'NextPartNumberMarker':
self.next_part_number_marker = value
elif name == 'MaxParts':
self.max_parts = int(value)
elif name == 'IsTruncated':
if value == 'true':
self.is_truncated = True
else:
self.is_truncated = False
elif name == 'Initiated':
self.initiated = value
else:
setattr(self, name, value)
def get_all_parts(self, max_parts=None, part_number_marker=None,
encoding_type=None):
"""
Return the uploaded parts of this MultiPart Upload. This is
a lower-level method that requires you to manually page through
results. To simplify this process, you can just use the
object itself as an iterator and it will automatically handle
all of the paging with S3.
"""
self._parts = []
query_args = 'uploadId=%s' % self.id
if max_parts:
query_args += '&max-parts=%d' % max_parts
if part_number_marker:
query_args += '&part-number-marker=%s' % part_number_marker
if encoding_type:
query_args += '&encoding-type=%s' % encoding_type
response = self.bucket.connection.make_request('GET', self.bucket.name,
self.key_name,
query_args=query_args)
body = response.read()
if response.status == 200:
h = handler.XmlHandler(self, self)
xml.sax.parseString(body, h)
return self._parts
raise Exception("Error %d in multipart upload request - %s" % (response.status, body))
def upload_part_from_file(self, fp, part_num, headers=None, replace=True,
cb=None, num_cb=10, md5=None, size=None):
"""
Upload another part of this MultiPart Upload.
.. note::
After you initiate multipart upload and upload one or more parts,
you must either complete or abort multipart upload in order to stop
getting charged for storage of the uploaded parts. Only after you
either complete or abort multipart upload, Amazon S3 frees up the
parts storage and stops charging you for the parts storage.
:type fp: file
:param fp: The file object you want to upload.
:type part_num: int
:param part_num: The number of this part.
The other parameters are exactly as defined for the
:class:`boto.s3.key.Key` set_contents_from_file method.
:rtype: :class:`boto.s3.key.Key` or subclass
:returns: The uploaded part containing the etag.
"""
if part_num < 1:
raise ValueError('Part numbers must be greater than zero')
query_args = 'uploadId=%s&partNumber=%d' % (self.id, part_num)
key = self.bucket.new_key(self.key_name)
key.set_contents_from_file(fp, headers=headers, replace=replace,
cb=cb, num_cb=num_cb, md5=md5,
reduced_redundancy=False,
query_args=query_args, size=size)
return key
def copy_part_from_key(self, src_bucket_name, src_key_name, part_num,
start=None, end=None, src_version_id=None,
headers=None):
"""
Copy another part of this MultiPart Upload.
:type src_bucket_name: string
:param src_bucket_name: Name of the bucket containing the source key
:type src_key_name: string
:param src_key_name: Name of the source key
:type part_num: int
:param part_num: The number of this part.
:type start: int
:param start: Zero-based byte offset to start copying from
:type end: int
:param end: Zero-based byte offset to copy to
:type src_version_id: string
:param src_version_id: version_id of source object to copy from
:type headers: dict
:param headers: Any headers to pass along in the request
"""
if part_num < 1:
raise ValueError('Part numbers must be greater than zero')
query_args = 'uploadId=%s&partNumber=%d' % (self.id, part_num)
if start is not None and end is not None:
rng = 'bytes=%s-%s' % (start, end)
provider = self.bucket.connection.provider
if headers is None:
headers = {}
else:
headers = headers.copy()
headers[provider.copy_source_range_header] = rng
return self.bucket.copy_key(self.key_name, src_bucket_name,
src_key_name,
src_version_id=src_version_id,
storage_class=None,
headers=headers,
query_args=query_args)
def complete_upload(self):
"""
Complete the MultiPart Upload operation. This method should
be called when all parts of the file have been successfully
uploaded to S3.
:rtype: :class:`boto.s3.multipart.CompletedMultiPartUpload`
:returns: An object representing the completed upload.
"""
xml = self.to_xml()
return self.bucket.complete_multipart_upload(self.key_name,
self.id, xml)
def cancel_upload(self):
"""
Cancels a MultiPart Upload operation. The storage consumed by
any previously uploaded parts will be freed. However, if any
part uploads are currently in progress, those part uploads
might or might not succeed. As a result, it might be necessary
to abort a given multipart upload multiple times in order to
completely free all storage consumed by all parts.
"""
self.bucket.cancel_multipart_upload(self.key_name, self.id)
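# A minimal end-to-end sketch of the API in this module, assuming valid AWS
# credentials are available to boto and that the bucket and local file below
# (placeholder names) exist; part sizing rules and error handling are omitted.
if __name__ == '__main__':
    import boto
    conn = boto.connect_s3()
    bucket = conn.get_bucket('my-example-bucket')
    mp = bucket.initiate_multipart_upload('big-object')
    with open('part1.bin', 'rb') as fp:
        # parts other than the last must be at least 5 MB on S3
        mp.upload_part_from_file(fp, part_num=1)
    completed = mp.complete_upload()  # or mp.cancel_upload() to abort and free the parts
    print(completed.location)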
| {
"content_hash": "e173480617db9ce7f09d9f9075932e4d",
"timestamp": "",
"source": "github",
"line_count": 307,
"max_line_length": 94,
"avg_line_length": 35.55048859934853,
"alnum_prop": 0.560197910940077,
"repo_name": "cloudera/hue",
"id": "1cf27609f620a1a92d1676aa6847f59090d02cc0",
"size": "12144",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/boto-2.46.1/boto/s3/multipart.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "2347"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "BASIC",
"bytes": "2884"
},
{
"name": "Batchfile",
"bytes": "143575"
},
{
"name": "C",
"bytes": "5129166"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "718011"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "680715"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "Closure Templates",
"bytes": "1072"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "Cython",
"bytes": "1016963"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "13576"
},
{
"name": "EJS",
"bytes": "752"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GAP",
"bytes": "29873"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "641"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "28328425"
},
{
"name": "Haml",
"bytes": "920"
},
{
"name": "Handlebars",
"bytes": "173"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "Java",
"bytes": "457398"
},
{
"name": "JavaScript",
"bytes": "39181239"
},
{
"name": "Jinja",
"bytes": "356"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Less",
"bytes": "396102"
},
{
"name": "Lex",
"bytes": "218764"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1751"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "1025937"
},
{
"name": "Mako",
"bytes": "3644004"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "29403"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "84273"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "6235"
},
{
"name": "Procfile",
"bytes": "47"
},
{
"name": "Pug",
"bytes": "584"
},
{
"name": "Python",
"bytes": "92881549"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "484108"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "SCSS",
"bytes": "78508"
},
{
"name": "Sass",
"bytes": "770"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "249165"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "SourcePawn",
"bytes": "948"
},
{
"name": "Stylus",
"bytes": "682"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "341963"
},
{
"name": "Twig",
"bytes": "761"
},
{
"name": "TypeScript",
"bytes": "1241396"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "Vue",
"bytes": "350385"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "522199"
},
{
"name": "Yacc",
"bytes": "1070437"
},
{
"name": "jq",
"bytes": "4"
}
],
"symlink_target": ""
} |
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class authenticationsamlidppolicy_binding(base_resource):
""" Binding class showing the resources that can be bound to authenticationsamlidppolicy_binding.
"""
def __init__(self) :
self._name = ""
self.authenticationsamlidppolicy_vpnvserver_binding = []
self.authenticationsamlidppolicy_authenticationvserver_binding = []
@property
def name(self) :
ur"""Name of the SAML IdentityProvider (IdP) policy for which to display detailed information.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
ur"""Name of the SAML IdentityProvider (IdP) policy for which to display detailed information.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def authenticationsamlidppolicy_authenticationvserver_bindings(self) :
ur"""authenticationvserver that can be bound to authenticationsamlidppolicy.
"""
try :
return self._authenticationsamlidppolicy_authenticationvserver_binding
except Exception as e:
raise e
@property
def authenticationsamlidppolicy_vpnvserver_bindings(self) :
ur"""vpnvserver that can be bound to authenticationsamlidppolicy.
"""
try :
return self._authenticationsamlidppolicy_vpnvserver_binding
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(authenticationsamlidppolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.authenticationsamlidppolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(self, service, name) :
ur""" Use this API to fetch authenticationsamlidppolicy_binding resource.
"""
try :
if type(name) is not list :
obj = authenticationsamlidppolicy_binding()
obj.name = name
response = obj.get_resource(service)
else :
if name and len(name) > 0 :
					obj = [authenticationsamlidppolicy_binding() for _ in range(len(name))]
					response = [None] * len(name)
					for i in range(len(name)) :
						obj[i].name = name[i]
						response[i] = obj[i].get_resource(service)
return response
except Exception as e:
raise e
class authenticationsamlidppolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.authenticationsamlidppolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.authenticationsamlidppolicy_binding = [authenticationsamlidppolicy_binding() for _ in range(length)]
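# A minimal fetch sketch, assuming `client` is an authenticated nitro_service
# instance created elsewhere with the NITRO SDK; the policy name is a placeholder.
#
#     bindings = authenticationsamlidppolicy_binding.get(client, "samlidp_pol_1")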
| {
"content_hash": "7b730cd03dce2ddb25192ff93d985da3",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 137,
"avg_line_length": 33.23148148148148,
"alnum_prop": 0.7297297297297297,
"repo_name": "atopuzov/nitro-python",
"id": "70a00c007647e56941980d3403499eb59cca6842",
"size": "4203",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nssrc/com/citrix/netscaler/nitro/resource/config/authentication/authenticationsamlidppolicy_binding.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "10881939"
},
{
"name": "Shell",
"bytes": "513"
}
],
"symlink_target": ""
} |
import time, copy
import os, os.path
import sys
import numpy
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from scipy import optimize
from echem_plate_ui import *
from echem_plate_math import *
import pickle
#p='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/20120728NiFeCoTiplate1_test21Aug2012'
#p='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fastCV_plate1_dlist.dat'
#os.chdir('C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fastCV_plate1_LinSubPlots')
#savefolder='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fast_plate1'
#vshift=-.2
#p='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9FeCoNiTi_500C_fast_CPCV_plate3_dlist.dat'
##os.chdir('C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fastCV_plate3_LinSubPlots')
#savefolder='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fast_plate3'
#p='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9FeCoNiTi_500C_fast_CPCV_plate2_dlist.dat'
##os.chdir('C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fastCV_plate3_LinSubPlots')
#savefolder='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fast_plate2'
#p='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fastCPCV_plate1_dlist.dat'
#os.chdir('C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fastCV_plate1_LinSubPlots2')
#savefolder='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fast_plate1'
#
#p='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fastrep2_plate1_dlist.dat'
##os.chdir('C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fastCV_plate3_LinSubPlots')
#savefolder='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fastrep2_plate1'
#p='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fastrep3_plate1_dlist.dat'
##os.chdir('C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fastCV_plate3_LinSubPlots')
#savefolder='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fastrep3_plate1'
#p='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9FeCoNiTi_500C_CPCV_Plate3-rerun_dlist.dat'
##os.chdir('C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fastCV_plate3_LinSubPlots')
#savefolder='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/2012-9_FeCoNiTi_500C_fast_plate3'
#p='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/20121108NiFeCoAl_F/results/NiFeCoAl_F_plate3_dlist.dat'
#os.chdir('C:/Users/Gregoire/Documents/CaltechWork/echemdrop/20121108NiFeCoAl_F/results/plate3/LinSubPlots')
#savefolder='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/20121108NiFeCoAl_F/results/'
#p='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/20121031NiFeCoTi_P/results/20121031NiFeCoTi_P_plate1_dlist.dat'#20121101NiFeCoTi_P_plate3_dlist.dat'
#os.chdir('C:/Users/Gregoire/Documents/CaltechWork/echemdrop/20121031NiFeCoTi_P/results/plate1/LogLinSubPlots')
#savefolder='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/20121031NiFeCoTi_P/results/plate1'
#pl=3
#os.chdir('C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/results/plate%d/LogLinSubPlots'%pl)
#savefolder='C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/results/plate%d' %pl
#if pl==1:
# p='C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/results/20130402NiFeCoCe_Plate1_5500_dlist.dat';vshift=-(.187-.045)
#elif pl==2:
# p='C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/results/20130403NiFeCoCe_Plate2_5498_dlist.dat';vshift=-(.187-.045)
#elif pl==3:
# p='C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/results/20130403NiFeCoCe_Plate3_4835_dlist.dat';vshift=-(.187-.045)
#os.chdir('C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/20130604NiFeCoCe/results/LogLinSubPlots')
#savefolder='C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/20130604NiFeCoCe/results'
#p='C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/20130604NiFeCoCe/results/20130604NiFeCoCe_plate1_CV_6220_dlist.dat';vshift=-(.187-.043)
#pl=3
#os.chdir('C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/20130528NiFeCoCe3platerescan/results/plate%d/LogLinSubPlots'%pl)
#savefolder='C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/20130528NiFeCoCe3platerescan/results/plate%d' %pl
#if pl==1:
# p='C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/20130528NiFeCoCe3platerescan/results/20130529NiFeCoCe_plate1_5577_dlist.dat';vshift=-(.187-.045)
#elif pl==2:
# p='C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/20130528NiFeCoCe3platerescan/results/20130603NiFeCoCe_plate2_5498_dlist.dat';vshift=-(.187-.045)
#elif pl==3:
# p='C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/20130528NiFeCoCe3platerescan/results/20130528NiFeCoCe_plate3_4835_dlist.dat';vshift=-(.187-0.045)
os.chdir('C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/20130610NiFeCoCesingle_6321/results/LogLinSubPlots')
savefolder='C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/20130610NiFeCoCesingle_6321/results'
p='C:/Users/Public/Documents/EchemDropRawData/NiFeCoCe/20130612NiFeCoCesingle_6321/results/20130612NiFeCoCe_plate1_CVpostCP_6321_dlist.dat';vshift=-(.187-0.045)
if not os.path.exists(savefolder):
os.mkdir(savefolder)
startpath_fom=os.path.join(savefolder, os.path.split(os.path.split(savefolder)[0])[1])
critsegVrange=.04
critsegIend=3.e-5
critsegVend=0.36
f=open(p, mode='r')
dlist=pickle.load(f)
f.close()
##filter dlist
dlist=[d for d in dlist if 'I(A)_LinSub' in d.keys()]
SGpts=10
booldev_frac=.5#this is for a binary array so anything between 0 and 1 is legit
booldev_nout=3
booldn_segstart=3*booldev_nout
dx=dlist[0]['dE']  # potential step size; assumed identical for every sample in dlist
dydev_frac=.2
dydev_nout=5
dn_segstart=3*dydev_nout
dydev_abs=0.
plotbool=0
SegSG_dlist(dlist, SGpts=SGpts, order=1, k='I(A)_LinSub')
#for selecting a particular sample
#smpall=numpy.array([d['Sample'] for d in dlist])
#i=numpy.where(smpall==18)[0]
#dlist=[dlist[i]]
#delete previous Tafel calculations
for d in dlist:
segd=d['segprops_dlist'][0]
for k in segd.keys():
if k.startswith('Tafel'):
del segd[k]
for k in d.keys():
if k.startswith('Tafel'):
del d[k]
##save csv of FOM
##calculate V for critical I, etc
for count, d in enumerate(dlist):
inds=d['segprops_dlist'][0]['inds']
#d['CV6fwdImax']=numpy.max(d['I(A)'][inds])
i=d['I(A)_LinSub_SG'][inds]
v=d['Ewe(V)'][inds]+vshift
posinds=numpy.where(i>5e-8)
invboolarr=numpy.float32(i<=5.e-8)
istart_segs, len_segs, fitdy_segs, fitinterc_segs=findzerosegs(invboolarr, booldev_frac, booldev_nout, booldn_segstart, SGnpts=10, plotbool=False, dx=1., maxfracoutliers=.5)
if len(istart_segs)==0:
print 'no positive segments found for ', count, ', sample ', d['Sample']
continue
ind=numpy.argmax(len_segs)
i0=istart_segs[ind]
i1=i0+len_segs[ind]
taffitinds=numpy.arange(i0, i1)
d['segprops_dlist'][0]['TafelFitInds']=inds[taffitinds]
i=i[i0:i1]
i[i<5e-8]=5e-8 #needed due to outliers
v=v[i0:i1]
il=numpy.log10(i)
try:
istart_segs, len_segs, fitdy_segs, fitinterc_segs, dy=findlinearsegs(il, dydev_frac, dydev_nout, dn_segstart, dydev_abs=dydev_abs, plotbool=plotbool, dx=dx, critdy_fracmaxdy=None)
except:
print 'error finding Tafel segments found for ', count, ', sample ', d['Sample']
continue
if len(istart_segs)==0:
print 'no Tafel segments found for ', count, ', sample ', d['Sample']
continue
    #keep only segments that span at least critsegVrange in V, whose current over the top tenth of the
    #segment exceeds critsegIend, whose V there exceeds critsegVend, and whose slope stays non-negative
    #from the segment end onward; of those, take the steepest one.
ind=None
maxdy=0
npts=critsegVrange/dx
npts2=max(2, npts//10+1)
for count2, (it0, slen, dyv) in enumerate(zip(istart_segs, len_segs, fitdy_segs)):
#print '**', count2
#print slen
if slen<npts:
continue
it1=it0+slen
#print numpy.mean(i[it1-npts2:it1])
if numpy.mean(i[it1-npts2:it1])<critsegIend:
continue
#print numpy.mean(v[it1-npts2:it1])
if numpy.mean(v[it1-npts2:it1])<critsegVend:
continue
#print numpy.any(dy[it1:]<0.)
if numpy.any(dy[it1:]<0.):
continue
#print dyv, maxdy
if dyv>maxdy:
maxdy=dyv
ind=count2
if ind is None:
print 'no Tafel segments found for ', count, ', sample ', d['Sample']
continue
#just take the longest
#ind=numpy.argmax(len_segs)
i0=istart_segs[ind]
i1=i0+len_segs[ind]
tafinds=numpy.arange(i0, i1)
it=il[tafinds]
vt=v[tafinds]
fitdy, fitint=numpy.polyfit(vt, it, 1)
d['segprops_dlist'][0]['TafelInds']=inds[taffitinds][tafinds]
d['TafelSlopeVperdec']=1./fitdy
d['TafelEstart_TafelValue']=v[0]
d['TafelFitVrange']=vt.max()-vt.min()
d['TafelLogExCurrent']=fitint
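    # The fit above is log10(I) = fitdy*V + fitint, so 1/fitdy is the Tafel slope in
    # V per decade of current (e.g. fitdy = 16.7 per V gives ~0.060 V/decade) and
    # fitint is the log10 of the extrapolated exchange current at V = 0 on the
    # shifted potential scale used here.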
##making 10-sample plots of linear subtraction
cols=['k','b', 'g', 'r', 'c', 'm', 'y', 'brown', 'purple', 'grey']
smpall=numpy.array([d['Sample'] for d in dlist])
dinds=numpy.argsort(smpall)
plotcount=0
smpl=[]
pylab.figure()
for di in dinds:
d=dlist[di]
if plotcount==10:
s='_'.join([`smp` for smp in smpl])
pylab.title(s)
pylab.savefig(s)
plotcount=0
smpl=[]
pylab.figure()
for segd in d['segprops_dlist']:#[2:3]:
for st, k in zip([':', '--', '-'], ['inds', 'TafelFitInds', 'TafelInds']):
if not k in segd.keys():
continue
x=d['Ewe(V)'][segd[k]]+vshift
y=d['I(A)_LinSub'][segd[k]]
posinds=numpy.where(y>5e-8)
x=x[posinds]
y=numpy.log10(y[posinds])
pylab.plot(x, y, st, color=cols[plotcount])
break
smpl+=[d['Sample']]
plotcount+=1
#pylab.show()
savekeys=['TafelSlopeVperdec','TafelEstart_TafelValue','TafelFitVrange','TafelLogExCurrent']
def writefile(p, dlist, savedlist=True, fomkey='FOM'):
if len(dlist)==0:
print 'no data to save'
return
labels=['Sample', 'x(mm)', 'y(mm)']
labels+=dlist[0]['elements']
labels+=[fomkey]
kv_fmt=[('Sample', '%d'), ('x', '%.2f'), ('y', '%.2f'), ('compositions', '%.4f'), (fomkey, '%.6e')]
arr=[]
for d in dlist:
arr2=[]
for k, fmt in kv_fmt:
if not k in d.keys():
v=numpy.nan
else:
v=d[k]
if isinstance(v, numpy.ndarray) or isinstance(v, list):
for subv in v:
arr2+=[fmt %subv]
else:
arr2+=[fmt %v]
arr+=['\t'.join(arr2)]
s='\t'.join(labels)+'\n'
s+='\n'.join(arr)
f=open(p, mode='w')
f.write(s)
f.close()
if savedlist:
f=open(p[:-4]+'_dlist.pck', mode='w')
pickle.dump(dlist, f)
f.close()
for fomkey in savekeys:
pf=startpath_fom+'_'+fomkey+'.txt'
#p=p[::-1].replace('plate'[::-1], 'plate1'[::-1], 1)[::-1]#temporary fix for file naming for stacked_tern4
writefile(pf, dlist, savedlist=False, fomkey=fomkey)
if 1:
f=open(p, mode='w')
pickle.dump(dlist, f)
f.close()
| {
"content_hash": "682e9b51f4d04f6629b94d95167b786f",
"timestamp": "",
"source": "github",
"line_count": 289,
"max_line_length": 195,
"avg_line_length": 41.10726643598616,
"alnum_prop": 0.6945286195286196,
"repo_name": "johnmgregoire/JCAPdatavis",
"id": "5704966d7c0d352ab83bd68fbb4a5da764bdbc05",
"size": "11881",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cvprocess_tempTafel_pickledlist4.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "711015"
}
],
"symlink_target": ""
} |
from bisect import bisect_left
from collections import deque
try:
    from collections.abc import Hashable
except ImportError:  # Python 2
    from collections import Hashable
from Ranger.src.Collections.RangeMap import RangeMap
from Ranger.src.Range.Range import Range
from Ranger.src.Range.Cut import Cut
class RangeBucketMap(RangeMap):
""" Class used to represent a mapping of disjoint ranges to sets of items. Ranges
do not coalesce. However, if a new Range is added over an existing Range, items
belonging to the existing Range are retained in that Range
"""
def __init__(self, rangeDict = None):
""" Instantiates a RangeBucketMap
Parameters
----------
rangeDict : Dictionary of Range -> object
Dictionary to start off the RangeBucketMap with
"""
self.recurseAdd = False
super(RangeBucketMap, self).__init__(rangeDict)
def iteritems(self, start = None, end = None):
""" Iterates over pairs of (Range, value)
Parameters
----------
start : comparable, optional
The starting point for iterating, inclusive
end : comparable, optional
The ending point for iterating, inclusive
Returns
-------
Generator of (Range intersecting [start,end], value), ordered by start point
"""
if start is None:
start = self.lower_cuts[0]
else:
start = Cut.belowValue(start)
if end is None:
end = self.upper_cuts[-1]
else:
end = Cut.aboveValue(end)
bounding_range = Range(start, end)
# Get the bounding indices
ovlapLowerInd = max(bisect_left(self.lower_cuts, start)-1,0)
ovlapUpperInd = bisect_left(self.lower_cuts, end)
# Create queue of values that need to be generated
yield_vals = deque()
# Create dictionary of values to be generated -> indices containing them
vals_inds_dict = {}
for i in range(ovlapLowerInd, ovlapUpperInd):
# Check if anything can be released from the queue
while len(yield_vals) > 0:
if vals_inds_dict[yield_vals[0]][-1] < i-1:
# Yield the full range, value. Remove value from queue
val = yield_vals.popleft()
yield Range(max(self.lower_cuts[vals_inds_dict[val][0]],start),
min(self.upper_cuts[vals_inds_dict[val][-1]],end)), val
# Remove value from dict
del vals_inds_dict[val]
else:
break
try:
# Get intersection of the ranges
intersect = bounding_range.intersection(self.ranges[i])
if not intersect.isEmpty():
# If overlapping with this range, put into queue
for val in self.items[i]:
if val not in vals_inds_dict:
yield_vals.append(val)
vals_inds_dict[val] = deque()
vals_inds_dict[val].append(i)
except ValueError:
# Continue if no overlap with this range
continue
## Yield remaining values
while len(yield_vals) > 0:
# Yield the full range, value. Remove value from queue
val = yield_vals.popleft()
yield Range(max(self.lower_cuts[vals_inds_dict[val][0]],start),
min(self.upper_cuts[vals_inds_dict[val][-1]],end)), val
# Remove value from dict
del vals_inds_dict[val]
def get(self, key):
""" Get the item(s) corresponding to a given key. The key can be a
Range or a single value that is within a Range
Parameters
----------
key : comparable
A single value or Range object
Raises
------
KeyError
If there is no overlap with the key
ValueError
If the key type not compatible with the ranges
Returns
-------
A set containing all overlapping items
"""
if not self.overlaps(key):
self.__missing__(key)
elif isinstance(key, Range):
# If this is a single value
returnSet = set()
# Get the bounding indices
ovlapLowerInd = max(bisect_left(self.lower_cuts, key.lowerCut)-1,0)
ovlapUpperInd = bisect_left(self.lower_cuts, key.upperCut)
for i in range(ovlapLowerInd, ovlapUpperInd):
try:
# Get intersection of the ranges
intersect = key.intersection(self.ranges[i])
if not intersect.isEmpty():
# If overlapping with this range, put its
# item in the return set
returnSet = returnSet.union(self.items[i])
except ValueError:
# Continue if no overlap with this range
continue
# Return the set of items
return returnSet
else:
# If this is a single value
# Get the index of the range containing the value
lower_ind = max(bisect_left(self.lower_cuts, key)-1,0)
# Return the item at that value
return self.items[lower_ind]
def put(self, key, val):
""" Creates a mapping from a Range to a value, adding to
any existing values over that Range
Parameters
----------
key : Range object
A Range to serve as a key
val : value, hashable
Some value that the Range should map to
Raises
------
TypeError
If the key is not a Range object or value is not hashable
"""
if not isinstance(key, Range):
raise TypeError("key is not a Range")
elif not any((isinstance(val, Hashable), self.recurseAdd)):
raise TypeError("value not hashable")
elif key.isEmpty():
# Skip if this is an empty range
return
# Figure out where to the key/value
if not self.overlaps(key):
# If this range is completely on its own, just insert
insertInd = bisect_left(self.lower_cuts, key.lowerCut)
self.ranges.insert(insertInd, key)
self.lower_cuts.insert(insertInd, key.lowerCut)
self.upper_cuts.insert(insertInd, key.upperCut)
if not isinstance(val, set):
self.items.insert(insertInd, set([val]))
else:
self.items.insert(insertInd, val)
return
else:
# If this range has some overlap with existing ranges
ovlapLowerInd = max(bisect_left(self.lower_cuts, key.lowerCut)-1,0)
ovlapUpperInd = bisect_left(self.lower_cuts, key.upperCut)
# Create queue ranges to add
addRanges = deque()
# Create queue of items to add
addItems = deque()
# Keep track of next lower cutpoint to add
nextLowerCut = key.lowerCut
for i in range(ovlapLowerInd, ovlapUpperInd):
try:
# Get intersection of the ranges
intersect = key.intersection(self.ranges[i])
if not intersect.isEmpty():
# Add in a Range between the next LowerCut and
# the beginning of this intersection if necessary
if nextLowerCut < intersect.lowerCut:
addRanges.append(Range(nextLowerCut, intersect.lowerCut))
addItems.append(val)
nextLowerCut = intersect.lowerCut
if intersect == self.ranges[i]:
## If key encompassing existing Range ##
# Add item to this range
self.items[i].add(val)
# Change the next lower cut
nextLowerCut = intersect.upperCut
elif self.lower_cuts[i] == intersect.lowerCut:
## If key upper cutpoint enclosed by existing Range ##
# Add in the rest of the original Range
if self.upper_cuts[i] > intersect.upperCut:
addRanges.append(Range(intersect.upperCut,
self.upper_cuts[i]))
addItems.append(set(self.items[i]))
# Define original part to be shorter
self.upper_cuts[i] = intersect.upperCut
self.ranges[i] = Range(self.lower_cuts[i],
intersect.upperCut)
self.items[i].add(val)
# Change the next lower cut
nextLowerCut = intersect.upperCut
elif self.upper_cuts[i] == intersect.upperCut:
## If key lower cutpoint enclosed by existing Range ##
# Add in the rest of the original Range
if intersect.lowerCut > self.lower_cuts[i]:
addRanges.append(Range(self.lower_cuts[i], intersect.lowerCut))
addItems.append(set(self.items[i]))
# Define original part to be shorter
self.lower_cuts[i] = intersect.lowerCut
self.ranges[i] = Range(self.lower_cuts[i],
intersect.upperCut)
self.items[i].add(val)
# Change the next lower cut
nextLowerCut = intersect.upperCut
else:
# If entire key enclosed by existing Range
# Add in lower part of original Range
addRanges.append(Range(self.lower_cuts[i], intersect.lowerCut))
addItems.append(set(self.items[i]))
# Add in upper part of original Range
addRanges.append(Range(intersect.upperCut, self.upper_cuts[i]))
addItems.append(set(self.items[i]))
# Define original part to be middle
self.lower_cuts[i] = intersect.lowerCut
self.upper_cuts[i] = intersect.upperCut
self.ranges[i] = Range(intersect.lowerCut,intersect.upperCut)
self.items[i].add(val)
# Change the next lower cut
nextLowerCut = intersect.upperCut
except ValueError:
# Continue if no overlap with this range
continue
# Put in a last range if necessary
if nextLowerCut < key.upperCut:
addRanges.append(Range(nextLowerCut, key.upperCut))
addItems.append(val)
# Use recursive call to place the pairs, which now
# should not overlap with any other ranges
self.recurseAdd = True
while len(addRanges) > 0:
self.put(addRanges.pop(),addItems.pop())
self.recurseAdd = False
def remove(self, aRange):
""" Removes a range and its value(s) from the range set
Parameters
----------
aRange : A Range object
The Range to remove
Raises
------
ValueError
If removing range of type not compatible with previously
added ranges
TypeError
If not a Range
"""
if not isinstance(aRange, Range):
raise TypeError("aRange is not a Range")
elif aRange.isEmpty():
# Skip if this is an empty range
return
# Check for compatibility of types if necessary
if len(self) > 0:
if not (issubclass(aRange.lowerCut.theType,
self.ranges[0].lowerCut.theType) or \
issubclass(self.ranges[0].lowerCut.theType,
aRange.lowerCut.theType)):
raise ValueError("Range not compatible with previously added ranges")
# Check if the range actually overlaps with the key set
if not self.overlaps(aRange):
return
else:
# There's some overlap, so deal with that
# Determine where overlap occurs
ovlapLowerInd = max(bisect_left(self.lower_cuts,
aRange.lowerCut)-1,0)
ovlapUpperInd = bisect_left(self.lower_cuts, aRange.upperCut)
# Create queue of indices marked for removal
removeRanges = deque()
# Create queue of ranges to add
addRanges = deque()
# Create queue of items to add with the addRanges
addItems = deque()
for i in range(ovlapLowerInd, ovlapUpperInd):
try:
# Get intersection of the ranges
intersect = aRange.intersection(self.ranges[i])
if not intersect.isEmpty():
if intersect == self.ranges[i]:
# Mark range for removal
removeRanges.append(i)
elif self.lower_cuts[i] == intersect.lowerCut:
# If equal on the left cutpoint, subtract
# out left part
self.lower_cuts[i] = intersect.upperCut
self.ranges[i] = Range(intersect.upperCut,
self.upper_cuts[i])
elif self.upper_cuts[i] == intersect.upperCut:
# If equal on right cutpoint, subtract out
# right part
self.upper_cuts[i] = intersect.lowerCut
self.ranges[i] = Range(self.lower_cuts[i],
intersect.lowerCut)
else:
# If in the middle, split into two parts, putting
# both in add queue and placing the old range index
# into the remove queue
addRanges.append(Range(self.lower_cuts[i],
intersect.lowerCut))
addItems.append(set(self.items[i]))
addRanges.append(Range(intersect.upperCut,
self.upper_cuts[i]))
addItems.append(set(self.items[i]))
removeRanges.append(i)
except ValueError:
# Continue if no overlap with this range
continue
# Remove any ranges that are marked for removal
while len(removeRanges) > 0:
removeInd = removeRanges.pop()
self.ranges.pop(removeInd)
self.lower_cuts.pop(removeInd)
self.upper_cuts.pop(removeInd)
self.items.pop(removeInd)
# Add any pairs that need to be added
self.recurseAdd = True
while len(addRanges) > 0:
self.put(addRanges.pop(), addItems.pop())
self.recurseAdd = False
| {
"content_hash": "ad7c80372c5747b1bc6e59d8c6abfbc2",
"timestamp": "",
"source": "github",
"line_count": 338,
"max_line_length": 95,
"avg_line_length": 47.16863905325444,
"alnum_prop": 0.501160383867528,
"repo_name": "er432/Ranger",
"id": "78f0be993c7832de48f267670b383682d0623624",
"size": "15943",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "Ranger/src/Collections/RangeBucketMap.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "125045"
}
],
"symlink_target": ""
} |
"""Tests for the custom pyfes types"""
import pytest
from pyfes.fes20 import expressions
from pyfes.fes20 import operators
from pyfes import errors
pytestmark = pytest.mark.unit
@pytest.mark.parametrize("operand", [
expressions.ValueReference("first"),
expressions.Literal("second"),
expressions.Function("third"),
])
def test_validate_operand(operand):
operators.validate_operand(operand)
@pytest.mark.parametrize("operand, allowed", [
(
expressions.ValueReference("first"),
(expressions.Literal, expressions.Function)
),
(
expressions.Literal("second"),
(expressions.ValueReference, expressions.Function)
),
(
expressions.Function("third"),
(expressions.ValueReference, expressions.Literal)
),
])
def test_validate_operand_invalid(operand, allowed):
with pytest.raises(errors.InvalidExpressionError):
operators.validate_operand(operand=operand, allowed_types=allowed)
@pytest.mark.parametrize("first, second", [
(
operators.BinaryComparisonOperator(
operator_type=operators.BinaryComparisonName.PROPERTY_IS_EQUAL_TO,
first_expression=expressions.ValueReference("this"),
second_expression=expressions.Literal("that"),
match_action=operators.MatchAction.ANY,
match_case=True
),
operators.BinaryComparisonOperator(
operator_type=operators.BinaryComparisonName.PROPERTY_IS_EQUAL_TO,
first_expression=expressions.ValueReference("this"),
second_expression=expressions.Literal("that"),
match_action=operators.MatchAction.ANY,
match_case=True
)
),
])
def test_operator_equality(first, second):
assert first == second
@pytest.mark.parametrize("operator_type", [
"PropertyIsEqualTo",
"PropertyIsNotEqualTo",
"PropertyIsGreaterThan",
"PropertyIsGreaterThanOrEqualTo",
"PropertyIsLessThan",
"PropertyIsLessThanOrEqualTo",
])
def test_binary_comparison_operator_type(operator_type):
first_expression = expressions.ValueReference("fake")
second_expression = expressions.ValueReference("phony")
operator = operators.BinaryComparisonOperator(
operator_type=operator_type,
first_expression=first_expression,
second_expression=second_expression,
)
assert operator.operator_type == operators.BinaryComparisonName(
operator_type)
@pytest.mark.parametrize("type_, first, second, match_action, expected", [
(
"fake_type",
expressions.ValueReference("first"),
expressions.ValueReference("second"),
operators.MatchAction.ANY,
errors.InvalidOperatorError
),
(
operators.BinaryComparisonName.PROPERTY_IS_EQUAL_TO,
"first",
expressions.ValueReference("second"),
operators.MatchAction.ANY,
errors.InvalidExpressionError
),
(
operators.BinaryComparisonName.PROPERTY_IS_EQUAL_TO,
expressions.ValueReference("first"),
"second",
operators.MatchAction.ANY,
errors.InvalidExpressionError
),
(
operators.BinaryComparisonName.PROPERTY_IS_EQUAL_TO,
expressions.ValueReference("first"),
expressions.ValueReference("second"),
"fake action",
ValueError
),
])
def test_binary_comparison_operator_invalid(type_, first, second, match_action,
expected):
with pytest.raises(expected):
operators.BinaryComparisonOperator(
operator_type=type_,
first_expression=first,
second_expression=second,
match_action=match_action
)
| {
"content_hash": "f98981e1e4e70aecc2f7256ceee67222",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 79,
"avg_line_length": 31.56198347107438,
"alnum_prop": 0.6509557475779,
"repo_name": "geopython/pyfes",
"id": "cbb36d0c8dd4c66e05109ced147f663cbdf0789e",
"size": "3819",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/unittests/test_fes20_operators.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "50368"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class AlignValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(self, plotly_name="align", parent_name="volume.hoverlabel", **kwargs):
super(AlignValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "none"),
values=kwargs.pop("values", ["left", "right", "auto"]),
**kwargs,
)
| {
"content_hash": "359a2412b49a48d9f596ad46963384f5",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 87,
"avg_line_length": 40.30769230769231,
"alnum_prop": 0.601145038167939,
"repo_name": "plotly/plotly.py",
"id": "699a124b262f38bbbd3b69884066b7bbb6a54fea",
"size": "524",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/volume/hoverlabel/_align.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
} |
'''Simple window'''
import sys
from PyQt4 import QtGui
app = QtGui.QApplication(sys.argv)
w = QtGui.QWidget()
w.show()
sys.exit(app.exec_())
| {
"content_hash": "91720cca3d1668e74645e4c51e891019",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 34,
"avg_line_length": 14.4,
"alnum_prop": 0.6944444444444444,
"repo_name": "aroberge/qt_py",
"id": "3a8ea0ec454eae44973acf3750a0a05b49c0d63b",
"size": "144",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyqt_examples/window1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "93834"
}
],
"symlink_target": ""
} |
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
VERSION = '0.2'
SETUP_DICT = dict(
name='smartserver',
packages=['egauge'],
version=VERSION,
author='mandarjog',
author_email='mandarjog@gmail.com',
url='https://github.com/PlotWatt/Smartmeters',
description='smart server config',
long_description="smart server config",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
# generate .rst file with documentation
#open(os.path.join(os.path.dirname(__file__), 'documentation.rst'), 'w').write(DOCUMENTATION)
setup(**SETUP_DICT)
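# Typical invocation (standard setuptools/distutils workflow, not specific to this package):
#   python setup.py sdist      # build a source distribution
#   python setup.py install    # install the package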
| {
"content_hash": "1031ee5b46dd4a7b26997620f8004fed",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 93,
"avg_line_length": 29.151515151515152,
"alnum_prop": 0.6403326403326404,
"repo_name": "SoftwareArtisan/smartmeter",
"id": "f4761833b3e56157b36330f57213496a9de8d485",
"size": "962",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "60303"
},
{
"name": "Shell",
"bytes": "910"
}
],
"symlink_target": ""
} |
"""
CLI interface for nova management.
"""
import netaddr
import os
import sys
from oslo.config import cfg
from nova.api.ec2 import ec2utils
from nova import availability_zones
from nova.cells import rpc_driver
from nova.compute import flavors
from nova import config
from nova import context
from nova import db
from nova.db import migration
from nova import exception
from nova.openstack.common import cliutils
from nova.openstack.common.db import exception as db_exc
from nova.openstack.common import importutils
from nova.openstack.common import log as logging
from nova.openstack.common import rpc
from nova import quota
from nova import servicegroup
from nova import version
CONF = cfg.CONF
CONF.import_opt('network_manager', 'nova.service')
CONF.import_opt('service_down_time', 'nova.service')
CONF.import_opt('flat_network_bridge', 'nova.network.manager')
CONF.import_opt('num_networks', 'nova.network.manager')
CONF.import_opt('multi_host', 'nova.network.manager')
CONF.import_opt('network_size', 'nova.network.manager')
CONF.import_opt('vlan_start', 'nova.network.manager')
CONF.import_opt('vpn_start', 'nova.network.manager')
CONF.import_opt('default_floating_pool', 'nova.network.floating_ips')
CONF.import_opt('public_interface', 'nova.network.linux_net')
QUOTAS = quota.QUOTAS
# Decorators for actions
def args(*args, **kwargs):
def _decorator(func):
func.__dict__.setdefault('args', []).insert(0, (args, kwargs))
return func
return _decorator
def param2id(object_id):
"""Helper function to convert various volume id types to internal id.
args: [object_id], e.g. 'vol-0000000a' or 'volume-0000000a' or '10'
"""
if '-' in object_id:
return ec2utils.ec2_vol_id_to_uuid(object_id)
else:
return object_id
class VpnCommands(object):
"""Class for managing VPNs."""
@args('--project', dest='project_id', metavar='<Project name>',
help='Project name')
@args('--ip', metavar='<IP Address>', help='IP Address')
@args('--port', metavar='<Port>', help='Port')
def change(self, project_id, ip, port):
"""Change the ip and port for a vpn.
this will update all networks associated with a project
not sure if that's the desired behavior or not, patches accepted
"""
# TODO(tr3buchet): perhaps this shouldn't update all networks
# associated with a project in the future
admin_context = context.get_admin_context()
networks = db.project_get_networks(admin_context, project_id)
for network in networks:
db.network_update(admin_context,
network['id'],
{'vpn_public_address': ip,
'vpn_public_port': int(port)})
class ShellCommands(object):
def bpython(self):
"""Runs a bpython shell.
Falls back to Ipython/python shell if unavailable
"""
self.run('bpython')
def ipython(self):
"""Runs an Ipython shell.
Falls back to Python shell if unavailable
"""
self.run('ipython')
def python(self):
"""Runs a python shell.
Falls back to Python shell if unavailable
"""
self.run('python')
@args('--shell', metavar='<bpython|ipython|python >',
help='Python shell')
def run(self, shell=None):
"""Runs a Python interactive interpreter."""
if not shell:
shell = 'bpython'
if shell == 'bpython':
try:
import bpython
bpython.embed()
except ImportError:
shell = 'ipython'
if shell == 'ipython':
try:
import IPython
# Explicitly pass an empty list as arguments, because
# otherwise IPython would use sys.argv from this script.
shell = IPython.Shell.IPShell(argv=[])
shell.mainloop()
except ImportError:
shell = 'python'
if shell == 'python':
import code
try:
# Try activating rlcompleter, because it's handy.
import readline
except ImportError:
pass
else:
# We don't have to wrap the following import in a 'try',
# because we already know 'readline' was imported successfully.
readline.parse_and_bind("tab:complete")
code.interact()
@args('--path', metavar='<path>', help='Script path')
def script(self, path):
"""Runs the script from the specified path with flags set properly.
arguments: path
"""
exec(compile(open(path).read(), path, 'exec'), locals(), globals())
def _db_error(caught_exception):
print caught_exception
print _("The above error may show that the database has not "
"been created.\nPlease create a database using "
"'nova-manage db sync' before running this command.")
exit(1)
class ProjectCommands(object):
"""Class for managing projects."""
@args('--project', dest='project_id', metavar='<Project name>',
help='Project name')
@args('--key', metavar='<key>', help='Key')
@args('--value', metavar='<value>', help='Value')
def quota(self, project_id, key=None, value=None):
"""
Create, update or display quotas for project
If no quota key is provided, the quota will be displayed.
If a valid quota key is provided and it does not exist,
it will be created. Otherwise, it will be updated.
"""
ctxt = context.get_admin_context()
project_quota = QUOTAS.get_project_quotas(ctxt, project_id)
# if key is None, that means we need to show the quotas instead
# of updating them
if key:
if key in project_quota:
if value.lower() == 'unlimited':
value = -1
try:
db.quota_create(ctxt, project_id, key, value)
except exception.QuotaExists:
db.quota_update(ctxt, project_id, key, value)
else:
print _('%(key)s is not a valid quota key. Valid options are: '
'%(options)s.') % {'key': key,
'options': ', '.join(project_quota)}
return(2)
print_format = "%-36s %-10s %-10s %-10s"
print print_format % (
_('Quota'),
_('Limit'),
_('In Use'),
_('Reserved'))
# Retrieve the quota after update
project_quota = QUOTAS.get_project_quotas(ctxt, project_id)
for key, value in project_quota.iteritems():
if value['limit'] < 0 or value['limit'] is None:
value['limit'] = 'unlimited'
print print_format % (key, value['limit'], value['in_use'],
value['reserved'])
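    # Example invocations (illustrative sketch; flag names come from the @args decorators
    # above, and the 'project' category is registered in CATEGORIES at the bottom of this
    # file -- the 'instances' quota key is just an example value):
    #   nova-manage project quota --project <tenant>                             # show quotas
    #   nova-manage project quota --project <tenant> --key instances --value 20  # set a quota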
@args('--project', dest='project_id', metavar='<Project name>',
help='Project name')
def scrub(self, project_id):
"""Deletes data associated with project."""
admin_context = context.get_admin_context()
networks = db.project_get_networks(admin_context, project_id)
for network in networks:
db.network_disassociate(admin_context, network['id'])
groups = db.security_group_get_by_project(admin_context, project_id)
for group in groups:
db.security_group_destroy(admin_context, group['id'])
AccountCommands = ProjectCommands
class FixedIpCommands(object):
"""Class for managing fixed ip."""
@args('--host', metavar='<host>', help='Host')
def list(self, host=None):
"""Lists all fixed ips (optionally by host)."""
ctxt = context.get_admin_context()
try:
if host is None:
fixed_ips = db.fixed_ip_get_all(ctxt)
else:
fixed_ips = db.fixed_ip_get_by_host(ctxt, host)
except exception.NotFound as ex:
print _("error: %s") % ex
return(2)
instances = db.instance_get_all(context.get_admin_context())
instances_by_uuid = {}
for instance in instances:
instances_by_uuid[instance['uuid']] = instance
print "%-18s\t%-15s\t%-15s\t%s" % (_('network'),
_('IP address'),
_('hostname'),
_('host'))
all_networks = {}
try:
# use network_get_all to retrieve all existing networks
# this is to ensure that IPs associated with deleted networks
# will not throw exceptions.
for network in db.network_get_all(context.get_admin_context()):
all_networks[network.id] = network
except exception.NoNetworksFound:
# do not have any networks, so even if there are IPs, these
# IPs should have been deleted ones, so return.
print _('No fixed IP found.')
return
has_ip = False
for fixed_ip in fixed_ips:
hostname = None
host = None
network = all_networks.get(fixed_ip['network_id'])
if network:
has_ip = True
if fixed_ip.get('instance_uuid'):
instance = instances_by_uuid.get(fixed_ip['instance_uuid'])
if instance:
hostname = instance['hostname']
host = instance['host']
else:
print _('WARNING: fixed ip %s allocated to missing'
' instance') % str(fixed_ip['address'])
print "%-18s\t%-15s\t%-15s\t%s" % (
network['cidr'],
fixed_ip['address'],
hostname, host)
if not has_ip:
print _('No fixed IP found.')
@args('--address', metavar='<ip address>', help='IP address')
def reserve(self, address):
"""Mark fixed ip as reserved
arguments: address
"""
return self._set_reserved(address, True)
@args('--address', metavar='<ip address>', help='IP address')
def unreserve(self, address):
"""Mark fixed ip as free to use
arguments: address
"""
return self._set_reserved(address, False)
def _set_reserved(self, address, reserved):
ctxt = context.get_admin_context()
try:
fixed_ip = db.fixed_ip_get_by_address(ctxt, address)
if fixed_ip is None:
raise exception.NotFound('Could not find address')
db.fixed_ip_update(ctxt, fixed_ip['address'],
{'reserved': reserved})
except exception.NotFound as ex:
print _("error: %s") % ex
return(2)
class FloatingIpCommands(object):
"""Class for managing floating ip."""
@staticmethod
def address_to_hosts(addresses):
"""
Iterate over hosts within an address range.
If an explicit range specifier is missing, the parameter is
interpreted as a specific individual address.
"""
try:
return [netaddr.IPAddress(addresses)]
except ValueError:
net = netaddr.IPNetwork(addresses)
if net.size < 4:
reason = _("/%s should be specified as single address(es) "
"not in cidr format") % net.prefixlen
raise exception.InvalidInput(reason=reason)
elif net.size >= 1000000:
# NOTE(dripton): If we generate a million IPs and put them in
# the database, the system will slow to a crawl and/or run
# out of memory and crash. This is clearly a misconfiguration.
reason = _("Too many IP addresses will be generated. Please "
"increase /%s to reduce the number generated."
) % net.prefixlen
raise exception.InvalidInput(reason=reason)
else:
return net.iter_hosts()
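    # Illustrative sketch of the helper above (comment only), relying on standard
    # netaddr behaviour:
    #   address_to_hosts('10.0.0.1')    -> [IPAddress('10.0.0.1')]
    #   address_to_hosts('10.0.0.0/29') -> iterator over the usable hosts 10.0.0.1 .. 10.0.0.6
    # A /31 or /32 (net.size < 4) raises InvalidInput, as does any range that would
    # generate a million addresses or more.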
@args('--ip_range', metavar='<range>', help='IP range')
@args('--pool', metavar='<pool>', help='Optional pool')
@args('--interface', metavar='<interface>', help='Optional interface')
def create(self, ip_range, pool=None, interface=None):
"""Creates floating ips for zone by range."""
admin_context = context.get_admin_context()
if not pool:
pool = CONF.default_floating_pool
if not interface:
interface = CONF.public_interface
ips = ({'address': str(address), 'pool': pool, 'interface': interface}
for address in self.address_to_hosts(ip_range))
try:
db.floating_ip_bulk_create(admin_context, ips)
except exception.FloatingIpExists as exc:
# NOTE(simplylizz): Maybe logging would be better here
# instead of printing, but logging isn't used here and I
# don't know why.
print('error: %s' % exc)
return(1)
@args('--ip_range', metavar='<range>', help='IP range')
def delete(self, ip_range):
"""Deletes floating ips by range."""
admin_context = context.get_admin_context()
ips = ({'address': str(address)}
for address in self.address_to_hosts(ip_range))
db.floating_ip_bulk_destroy(admin_context, ips)
@args('--host', metavar='<host>', help='Host')
def list(self, host=None):
"""Lists all floating ips (optionally by host).
Note: if host is given, only active floating IPs are returned
"""
ctxt = context.get_admin_context()
try:
if host is None:
floating_ips = db.floating_ip_get_all(ctxt)
else:
floating_ips = db.floating_ip_get_all_by_host(ctxt, host)
except exception.NoFloatingIpsDefined:
print _("No floating IP addresses have been defined.")
return
for floating_ip in floating_ips:
instance_uuid = None
if floating_ip['fixed_ip_id']:
fixed_ip = db.fixed_ip_get(ctxt, floating_ip['fixed_ip_id'])
instance_uuid = fixed_ip['instance_uuid']
print "%s\t%s\t%s\t%s\t%s" % (floating_ip['project_id'],
floating_ip['address'],
instance_uuid,
floating_ip['pool'],
floating_ip['interface'])
class NetworkCommands(object):
"""Class for managing networks."""
@args('--label', metavar='<label>', help='Label for network (ex: public)')
@args('--fixed_range_v4', dest='cidr', metavar='<x.x.x.x/yy>',
help='IPv4 subnet (ex: 10.0.0.0/8)')
@args('--num_networks', metavar='<number>',
help='Number of networks to create')
@args('--network_size', metavar='<number>',
help='Number of IPs per network')
@args('--vlan', dest='vlan_start', metavar='<vlan id>', help='vlan id')
@args('--vpn', dest='vpn_start', help='vpn start')
@args('--fixed_range_v6', dest='cidr_v6',
          help='IPv6 subnet (ex: fe80::/64)')
@args('--gateway', help='gateway')
@args('--gateway_v6', help='ipv6 gateway')
@args('--bridge', metavar='<bridge>',
help='VIFs on this network are connected to this bridge')
@args('--bridge_interface', metavar='<bridge interface>',
help='the bridge is connected to this interface')
@args('--multi_host', metavar="<'T'|'F'>",
help='Multi host')
@args('--dns1', metavar="<DNS Address>", help='First DNS')
@args('--dns2', metavar="<DNS Address>", help='Second DNS')
@args('--uuid', metavar="<network uuid>", help='Network UUID')
@args('--fixed_cidr', metavar='<x.x.x.x/yy>',
help='IPv4 subnet for fixed IPS (ex: 10.20.0.0/16)')
@args('--project_id', metavar="<project id>",
help='Project id')
@args('--priority', metavar="<number>", help='Network interface priority')
def create(self, label=None, cidr=None, num_networks=None,
network_size=None, multi_host=None, vlan_start=None,
vpn_start=None, cidr_v6=None, gateway=None,
gateway_v6=None, bridge=None, bridge_interface=None,
dns1=None, dns2=None, project_id=None, priority=None,
uuid=None, fixed_cidr=None):
"""Creates fixed ips for host by range."""
kwargs = dict(((k, v) for k, v in locals().iteritems()
if v and k != "self"))
if multi_host is not None:
kwargs['multi_host'] = multi_host == 'T'
net_manager = importutils.import_object(CONF.network_manager)
net_manager.create_networks(context.get_admin_context(), **kwargs)
def list(self):
"""List all created networks."""
_fmt = "%-5s\t%-18s\t%-15s\t%-15s\t%-15s\t%-15s\t%-15s\t%-15s\t%-15s"
print _fmt % (_('id'),
_('IPv4'),
_('IPv6'),
_('start address'),
_('DNS1'),
_('DNS2'),
_('VlanID'),
_('project'),
_("uuid"))
try:
# Since network_get_all can throw exception.NoNetworksFound
# for this command to show a nice result, this exception
# should be caught and handled as such.
networks = db.network_get_all(context.get_admin_context())
except exception.NoNetworksFound:
print _('No networks found')
else:
for network in networks:
print _fmt % (network.id,
network.cidr,
network.cidr_v6,
network.dhcp_start,
network.dns1,
network.dns2,
network.vlan,
network.project_id,
network.uuid)
@args('--fixed_range', metavar='<x.x.x.x/yy>', help='Network to delete')
@args('--uuid', metavar='<uuid>', help='UUID of network to delete')
def delete(self, fixed_range=None, uuid=None):
"""Deletes a network."""
if fixed_range is None and uuid is None:
raise Exception(_("Please specify either fixed_range or uuid"))
net_manager = importutils.import_object(CONF.network_manager)
if "NeutronManager" in CONF.network_manager:
if uuid is None:
raise Exception(_("UUID is required to delete "
"Neutron Networks"))
if fixed_range:
raise Exception(_("Deleting by fixed_range is not supported "
"with the NeutronManager"))
# delete the network
net_manager.delete_network(context.get_admin_context(),
fixed_range, uuid)
@args('--fixed_range', metavar='<x.x.x.x/yy>', help='Network to modify')
@args('--project', metavar='<project name>',
help='Project name to associate')
@args('--host', metavar='<host>', help='Host to associate')
@args('--disassociate-project', action="store_true", dest='dis_project',
default=False, help='Disassociate Network from Project')
@args('--disassociate-host', action="store_true", dest='dis_host',
default=False, help='Disassociate Host from Project')
def modify(self, fixed_range, project=None, host=None,
dis_project=None, dis_host=None):
"""Associate/Disassociate Network with Project and/or Host
arguments: network project host
leave any field blank to ignore it
"""
admin_context = context.get_admin_context()
network = db.network_get_by_cidr(admin_context, fixed_range)
net = {}
#User can choose the following actions each for project and host.
#1) Associate (set not None value given by project/host parameter)
#2) Disassociate (set None by disassociate parameter)
#3) Keep unchanged (project/host key is not added to 'net')
if dis_project:
net['project_id'] = None
if dis_host:
net['host'] = None
        # The --disassociate-X are boolean options, but if the user
        # mistakenly provides a value, it will be used as a positional argument
        # and be erroneously interpreted as some other parameter (e.g.
# a project instead of host value). The safest thing to do is error-out
# with a message indicating that there is probably a problem with
# how the disassociate modifications are being used.
if dis_project or dis_host:
if project or host:
error_msg = "ERROR: Unexpected arguments provided. Please " \
"use separate commands."
print(error_msg)
return(1)
db.network_update(admin_context, network['id'], net)
return
if project:
net['project_id'] = project
if host:
net['host'] = host
db.network_update(admin_context, network['id'], net)
class VmCommands(object):
"""Class for mangaging VM instances."""
@args('--host', metavar='<host>', help='Host')
def list(self, host=None):
"""Show a list of all instances."""
print ("%-10s %-15s %-10s %-10s %-26s %-9s %-9s %-9s"
" %-10s %-10s %-10s %-5s" % (_('instance'),
_('node'),
_('type'),
_('state'),
_('launched'),
_('image'),
_('kernel'),
_('ramdisk'),
_('project'),
_('user'),
_('zone'),
_('index')))
if host is None:
instances = db.instance_get_all(context.get_admin_context())
else:
instances = db.instance_get_all_by_host(
context.get_admin_context(), host)
for instance in instances:
instance_type = flavors.extract_flavor(instance)
print ("%-10s %-15s %-10s %-10s %-26s %-9s %-9s %-9s"
" %-10s %-10s %-10s %-5d" % (instance['display_name'],
instance['host'],
instance_type['name'],
instance['vm_state'],
instance['launched_at'],
instance['image_ref'],
instance['kernel_id'],
instance['ramdisk_id'],
instance['project_id'],
instance['user_id'],
instance['availability_zone'],
instance['launch_index']))
class ServiceCommands(object):
"""Enable and disable running services."""
@args('--host', metavar='<host>', help='Host')
@args('--service', metavar='<service>', help='Nova service')
def list(self, host=None, service=None):
"""
Show a list of all running services. Filter by host & service name.
"""
servicegroup_api = servicegroup.API()
ctxt = context.get_admin_context()
services = db.service_get_all(ctxt)
services = availability_zones.set_availability_zones(ctxt, services)
if host:
services = [s for s in services if s['host'] == host]
if service:
services = [s for s in services if s['binary'] == service]
print_format = "%-16s %-36s %-16s %-10s %-5s %-10s"
print print_format % (
_('Binary'),
_('Host'),
_('Zone'),
_('Status'),
_('State'),
_('Updated_At'))
for svc in services:
alive = servicegroup_api.service_is_up(svc)
art = (alive and ":-)") or "XXX"
active = 'enabled'
if svc['disabled']:
active = 'disabled'
print print_format % (svc['binary'], svc['host'],
svc['availability_zone'], active, art,
svc['updated_at'])
@args('--host', metavar='<host>', help='Host')
@args('--service', metavar='<service>', help='Nova service')
def enable(self, host, service):
"""Enable scheduling for a service."""
ctxt = context.get_admin_context()
try:
svc = db.service_get_by_args(ctxt, host, service)
db.service_update(ctxt, svc['id'], {'disabled': False})
except exception.NotFound as ex:
print _("error: %s") % ex
return(2)
print _("Service %(service)s on host %(host)s enabled.") % locals()
@args('--host', metavar='<host>', help='Host')
@args('--service', metavar='<service>', help='Nova service')
def disable(self, host, service):
"""Disable scheduling for a service."""
ctxt = context.get_admin_context()
try:
svc = db.service_get_by_args(ctxt, host, service)
db.service_update(ctxt, svc['id'], {'disabled': True})
except exception.NotFound as ex:
print _("error: %s") % ex
return(2)
print _("Service %(service)s on host %(host)s disabled.") % locals()
def _show_host_resources(self, context, host):
"""Shows the physical/usage resource given by hosts.
:param context: security context
:param host: hostname
:returns:
example format is below::
{'resource':D, 'usage':{proj_id1:D, proj_id2:D}}
D: {'vcpus': 3, 'memory_mb': 2048, 'local_gb': 2048,
'vcpus_used': 12, 'memory_mb_used': 10240,
'local_gb_used': 64}
"""
# Getting compute node info and related instances info
service_ref = db.service_get_by_compute_host(context, host)
instance_refs = db.instance_get_all_by_host(context,
service_ref['host'])
# Getting total available/used resource
compute_ref = service_ref['compute_node'][0]
resource = {'vcpus': compute_ref['vcpus'],
'memory_mb': compute_ref['memory_mb'],
'local_gb': compute_ref['local_gb'],
'vcpus_used': compute_ref['vcpus_used'],
'memory_mb_used': compute_ref['memory_mb_used'],
'local_gb_used': compute_ref['local_gb_used']}
usage = dict()
if not instance_refs:
return {'resource': resource, 'usage': usage}
# Getting usage resource per project
project_ids = [i['project_id'] for i in instance_refs]
project_ids = list(set(project_ids))
for project_id in project_ids:
vcpus = [i['vcpus'] for i in instance_refs
if i['project_id'] == project_id]
mem = [i['memory_mb'] for i in instance_refs
if i['project_id'] == project_id]
root = [i['root_gb'] for i in instance_refs
if i['project_id'] == project_id]
ephemeral = [i['ephemeral_gb'] for i in instance_refs
if i['project_id'] == project_id]
usage[project_id] = {'vcpus': sum(vcpus),
'memory_mb': sum(mem),
'root_gb': sum(root),
'ephemeral_gb': sum(ephemeral)}
return {'resource': resource, 'usage': usage}
@args('--host', metavar='<host>', help='Host')
def describe_resource(self, host):
"""Describes cpu/memory/hdd info for host.
:param host: hostname.
"""
result = self._show_host_resources(context.get_admin_context(),
host=host)
if not isinstance(result, dict):
print _('An unexpected error has occurred.')
print _('[Result]'), result
else:
# Printing a total and used_now
# (NOTE)The host name width 16 characters
print '%(a)-25s%(b)16s%(c)8s%(d)8s%(e)8s' % {"a": _('HOST'),
"b": _('PROJECT'),
"c": _('cpu'),
"d": _('mem(mb)'),
"e": _('hdd')}
print ('%(a)-16s(total)%(b)26s%(c)8s%(d)8s' %
{"a": host,
"b": result['resource']['vcpus'],
"c": result['resource']['memory_mb'],
"d": result['resource']['local_gb']})
print ('%(a)-16s(used_now)%(b)23s%(c)8s%(d)8s' %
{"a": host,
"b": result['resource']['vcpus_used'],
"c": result['resource']['memory_mb_used'],
"d": result['resource']['local_gb_used']})
# Printing a used_max
cpu_sum = 0
mem_sum = 0
hdd_sum = 0
for p_id, val in result['usage'].items():
cpu_sum += val['vcpus']
mem_sum += val['memory_mb']
hdd_sum += val['root_gb']
hdd_sum += val['ephemeral_gb']
print '%(a)-16s(used_max)%(b)23s%(c)8s%(d)8s' % {"a": host,
"b": cpu_sum,
"c": mem_sum,
"d": hdd_sum}
for p_id, val in result['usage'].items():
print '%(a)-25s%(b)16s%(c)8s%(d)8s%(e)8s' % {
"a": host,
"b": p_id,
"c": val['vcpus'],
"d": val['memory_mb'],
"e": val['root_gb'] + val['ephemeral_gb']}
class HostCommands(object):
"""List hosts."""
def list(self, zone=None):
"""Show a list of all physical hosts. Filter by zone.
args: [zone]
"""
print "%-25s\t%-15s" % (_('host'),
_('zone'))
ctxt = context.get_admin_context()
services = db.service_get_all(ctxt)
services = availability_zones.set_availability_zones(ctxt, services)
if zone:
services = [s for s in services if s['availability_zone'] == zone]
hosts = []
for srv in services:
if not [h for h in hosts if h['host'] == srv['host']]:
hosts.append(srv)
for h in hosts:
print "%-25s\t%-15s" % (h['host'], h['availability_zone'])
class DbCommands(object):
"""Class for managing the database."""
def __init__(self):
pass
@args('--version', metavar='<version>', help='Database version')
def sync(self, version=None):
"""Sync the database up to the most recent version."""
return migration.db_sync(version)
def version(self):
"""Print the current database version."""
print migration.db_version()
@args('--max_rows', metavar='<number>',
help='Maximum number of deleted rows to archive')
def archive_deleted_rows(self, max_rows):
"""Move up to max_rows deleted rows from production tables to shadow
tables.
"""
if max_rows is not None:
max_rows = int(max_rows)
if max_rows < 0:
print _("Must supply a positive value for max_rows")
return(1)
admin_context = context.get_admin_context()
db.archive_deleted_rows(admin_context, max_rows)
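    # Example invocation (illustrative sketch; the 'db' category is registered in
    # CATEGORIES below):
    #   nova-manage db archive_deleted_rows --max_rows 1000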
class InstanceTypeCommands(object):
"""Class for managing instance types / flavors."""
def _print_instance_types(self, name, val):
is_public = ('private', 'public')[val["is_public"] == 1]
print ("%s: Memory: %sMB, VCPUS: %s, Root: %sGB, Ephemeral: %sGb, "
"FlavorID: %s, Swap: %sMB, RXTX Factor: %s, %s, ExtraSpecs %s") % (
name, val["memory_mb"], val["vcpus"], val["root_gb"],
val["ephemeral_gb"], val["flavorid"], val["swap"],
val["rxtx_factor"], is_public, val["extra_specs"])
@args('--name', metavar='<name>',
help='Name of instance type/flavor')
@args('--memory', metavar='<memory size>', help='Memory size')
@args('--cpu', dest='vcpus', metavar='<num cores>', help='Number cpus')
@args('--root_gb', metavar='<root_gb>', help='Root disk size')
@args('--ephemeral_gb', metavar='<ephemeral_gb>',
help='Ephemeral disk size')
@args('--flavor', dest='flavorid', metavar='<flavor id>',
help='Flavor ID')
@args('--swap', metavar='<swap>', help='Swap')
@args('--rxtx_factor', metavar='<rxtx_factor>', help='rxtx_factor')
@args('--is_public', metavar='<is_public>',
help='Make flavor accessible to the public')
def create(self, name, memory, vcpus, root_gb, ephemeral_gb=0,
flavorid=None, swap=0, rxtx_factor=1.0, is_public=True):
"""Creates instance types / flavors."""
try:
flavors.create(name, memory, vcpus, root_gb,
ephemeral_gb=ephemeral_gb, flavorid=flavorid,
swap=swap, rxtx_factor=rxtx_factor,
is_public=is_public)
except exception.InvalidInput as e:
print _("Must supply valid parameters to create instance_type")
print e
return(1)
except exception.InstanceTypeExists:
print _("Instance Type exists.")
print _("Please ensure instance_type name and flavorid are "
"unique.")
print _("Currently defined instance_type names and flavorids:")
print
self.list()
return(2)
except Exception:
print _("Unknown error")
return(3)
else:
print _("%s created") % name
@args('--name', metavar='<name>', help='Name of instance type/flavor')
def delete(self, name):
"""Marks instance types / flavors as deleted."""
try:
flavors.destroy(name)
except exception.InstanceTypeNotFound:
print _("Valid instance type name is required")
return(1)
except db_exc.DBError as e:
print _("DB Error: %s") % e
return(2)
except Exception:
return(3)
else:
print _("%s deleted") % name
@args('--name', metavar='<name>', help='Name of instance type/flavor')
def list(self, name=None):
"""Lists all active or specific instance types / flavors."""
try:
if name is None:
inst_types = flavors.get_all_flavors()
else:
inst_types = flavors.get_flavor_by_name(name)
except db_exc.DBError as e:
_db_error(e)
if isinstance(inst_types.values()[0], dict):
for k, v in inst_types.iteritems():
self._print_instance_types(k, v)
else:
self._print_instance_types(name, inst_types)
@args('--name', metavar='<name>', help='Name of instance type/flavor')
@args('--key', metavar='<key>', help='The key of the key/value pair')
@args('--value', metavar='<value>', help='The value of the key/value pair')
def set_key(self, name, key, value=None):
"""Add key/value pair to specified instance type's extra_specs."""
try:
try:
inst_type = flavors.get_flavor_by_name(name)
except exception.InstanceTypeNotFoundByName as e:
print e
return(2)
ctxt = context.get_admin_context()
ext_spec = {key: value}
db.flavor_extra_specs_update_or_create(
ctxt,
inst_type["flavorid"],
ext_spec)
print _("Key %(key)s set to %(value)s on instance"
" type %(name)s") % locals()
except db_exc.DBError as e:
_db_error(e)
@args('--name', metavar='<name>', help='Name of instance type/flavor')
@args('--key', metavar='<key>', help='The key to be deleted')
def unset_key(self, name, key):
"""Delete the specified extra spec for instance type."""
try:
try:
inst_type = flavors.get_flavor_by_name(name)
except exception.InstanceTypeNotFoundByName as e:
print e
return(2)
ctxt = context.get_admin_context()
db.flavor_extra_specs_delete(
ctxt,
inst_type["flavorid"],
key)
print _("Key %(key)s on instance type %(name)s unset") % locals()
except db_exc.DBError as e:
_db_error(e)
class AgentBuildCommands(object):
"""Class for managing agent builds."""
@args('--os', metavar='<os>', help='os')
@args('--architecture', dest='architecture',
metavar='<architecture>', help='architecture')
@args('--version', metavar='<version>', help='version')
@args('--url', metavar='<url>', help='url')
@args('--md5hash', metavar='<md5hash>', help='md5hash')
@args('--hypervisor', metavar='<hypervisor>',
help='hypervisor(default: xen)')
def create(self, os, architecture, version, url, md5hash,
hypervisor='xen'):
"""Creates a new agent build."""
ctxt = context.get_admin_context()
db.agent_build_create(ctxt, {'hypervisor': hypervisor,
'os': os,
'architecture': architecture,
'version': version,
'url': url,
'md5hash': md5hash})
@args('--os', metavar='<os>', help='os')
@args('--architecture', dest='architecture',
metavar='<architecture>', help='architecture')
@args('--hypervisor', metavar='<hypervisor>',
help='hypervisor(default: xen)')
def delete(self, os, architecture, hypervisor='xen'):
"""Deletes an existing agent build."""
ctxt = context.get_admin_context()
agent_build_ref = db.agent_build_get_by_triple(ctxt,
hypervisor, os, architecture)
db.agent_build_destroy(ctxt, agent_build_ref['id'])
@args('--hypervisor', metavar='<hypervisor>',
help='hypervisor(default: None)')
def list(self, hypervisor=None):
"""Lists all agent builds.
arguments: <none>
"""
fmt = "%-10s %-8s %12s %s"
ctxt = context.get_admin_context()
by_hypervisor = {}
for agent_build in db.agent_build_get_all(ctxt):
buildlist = by_hypervisor.get(agent_build.hypervisor)
if not buildlist:
buildlist = by_hypervisor[agent_build.hypervisor] = []
buildlist.append(agent_build)
for key, buildlist in by_hypervisor.iteritems():
if hypervisor and key != hypervisor:
continue
print _('Hypervisor: %s') % key
print fmt % ('-' * 10, '-' * 8, '-' * 12, '-' * 32)
for agent_build in buildlist:
print fmt % (agent_build.os, agent_build.architecture,
agent_build.version, agent_build.md5hash)
print ' %s' % agent_build.url
print
@args('--os', metavar='<os>', help='os')
@args('--architecture', dest='architecture',
metavar='<architecture>', help='architecture')
@args('--version', metavar='<version>', help='version')
@args('--url', metavar='<url>', help='url')
@args('--md5hash', metavar='<md5hash>', help='md5hash')
@args('--hypervisor', metavar='<hypervisor>',
help='hypervisor(default: xen)')
def modify(self, os, architecture, version, url, md5hash,
hypervisor='xen'):
"""Update an existing agent build."""
ctxt = context.get_admin_context()
agent_build_ref = db.agent_build_get_by_triple(ctxt,
hypervisor, os, architecture)
db.agent_build_update(ctxt, agent_build_ref['id'],
{'version': version,
'url': url,
'md5hash': md5hash})
class GetLogCommands(object):
"""Get logging information."""
def errors(self):
"""Get all of the errors from the log files."""
error_found = 0
if CONF.log_dir:
logs = [x for x in os.listdir(CONF.log_dir) if x.endswith('.log')]
for file in logs:
log_file = os.path.join(CONF.log_dir, file)
lines = [line.strip() for line in open(log_file, "r")]
lines.reverse()
print_name = 0
for index, line in enumerate(lines):
if line.find(" ERROR ") > 0:
error_found += 1
if print_name == 0:
print log_file + ":-"
print_name = 1
linenum = len(lines) - index
print _('Line %(linenum)d : %(line)s') % locals()
if error_found == 0:
print _('No errors in logfiles!')
@args('--num_entries', metavar='<number of entries>',
help='number of entries(default: 10)')
def syslog(self, num_entries=10):
"""Get <num_entries> of the nova syslog events."""
entries = int(num_entries)
count = 0
log_file = ''
if os.path.exists('/var/log/syslog'):
log_file = '/var/log/syslog'
elif os.path.exists('/var/log/messages'):
log_file = '/var/log/messages'
else:
print _('Unable to find system log file!')
return(1)
lines = [line.strip() for line in open(log_file, "r")]
lines.reverse()
print _('Last %s nova syslog entries:-') % (entries)
for line in lines:
if line.find("nova") > 0:
count += 1
print "%s" % (line)
if count == entries:
break
if count == 0:
print _('No nova entries in syslog!')
class CellCommands(object):
"""Commands for managing cells."""
@args('--name', metavar='<name>', help='Name for the new cell')
@args('--cell_type', metavar='<parent|child>',
help='Whether the cell is a parent or child')
@args('--username', metavar='<username>',
help='Username for the message broker in this cell')
@args('--password', metavar='<password>',
help='Password for the message broker in this cell')
@args('--hostname', metavar='<hostname>',
help='Address of the message broker in this cell')
@args('--port', metavar='<number>',
help='Port number of the message broker in this cell')
@args('--virtual_host', metavar='<virtual_host>',
help='The virtual host of the message broker in this cell')
@args('--woffset', metavar='<float>')
@args('--wscale', metavar='<float>')
def create(self, name, cell_type='child', username=None, password=None,
hostname=None, port=None, virtual_host=None,
woffset=None, wscale=None):
if cell_type not in ['parent', 'child']:
print "Error: cell type must be 'parent' or 'child'"
return(2)
# Set up the transport URL
transport = {
'username': username,
'password': password,
'hostname': hostname,
'port': int(port),
'virtual_host': virtual_host,
}
transport_url = rpc_driver.unparse_transport_url(transport)
is_parent = cell_type == 'parent'
values = {'name': name,
'is_parent': is_parent,
'transport_url': transport_url,
'weight_offset': float(woffset),
'weight_scale': float(wscale)}
ctxt = context.get_admin_context()
db.cell_create(ctxt, values)
@args('--cell_name', metavar='<cell_name>',
help='Name of the cell to delete')
def delete(self, cell_name):
ctxt = context.get_admin_context()
db.cell_delete(ctxt, cell_name)
def list(self):
ctxt = context.get_admin_context()
cells = db.cell_get_all(ctxt)
fmt = "%3s %-10s %-6s %-10s %-15s %-5s %-10s"
print fmt % ('Id', 'Name', 'Type', 'Username', 'Hostname',
'Port', 'VHost')
print fmt % ('-' * 3, '-' * 10, '-' * 6, '-' * 10, '-' * 15,
'-' * 5, '-' * 10)
for cell in cells:
transport = rpc_driver.parse_transport_url(cell.transport_url)
print fmt % (cell.id, cell.name,
'parent' if cell.is_parent else 'child',
transport['username'], transport['hostname'],
transport['port'], transport['virtual_host'])
print fmt % ('-' * 3, '-' * 10, '-' * 6, '-' * 10, '-' * 15,
'-' * 5, '-' * 10)
CATEGORIES = {
'account': AccountCommands,
'agent': AgentBuildCommands,
'cell': CellCommands,
'db': DbCommands,
'fixed': FixedIpCommands,
'flavor': InstanceTypeCommands,
'floating': FloatingIpCommands,
'host': HostCommands,
'instance_type': InstanceTypeCommands,
'logs': GetLogCommands,
'network': NetworkCommands,
'project': ProjectCommands,
'service': ServiceCommands,
'shell': ShellCommands,
'vm': VmCommands,
'vpn': VpnCommands,
}
def methods_of(obj):
"""Get all callable methods of an object that don't start with underscore
returns a list of tuples of the form (method_name, method)
"""
result = []
for i in dir(obj):
if callable(getattr(obj, i)) and not i.startswith('_'):
result.append((i, getattr(obj, i)))
return result
def add_command_parsers(subparsers):
parser = subparsers.add_parser('version')
parser = subparsers.add_parser('bash-completion')
parser.add_argument('query_category', nargs='?')
for category in CATEGORIES:
command_object = CATEGORIES[category]()
parser = subparsers.add_parser(category)
parser.set_defaults(command_object=command_object)
category_subparsers = parser.add_subparsers(dest='action')
for (action, action_fn) in methods_of(command_object):
parser = category_subparsers.add_parser(action)
action_kwargs = []
for args, kwargs in getattr(action_fn, 'args', []):
# FIXME(markmc): hack to assume dest is the arg name without
# the leading hyphens if no dest is supplied
kwargs.setdefault('dest', args[0][2:])
if kwargs['dest'].startswith('action_kwarg_'):
action_kwargs.append(
kwargs['dest'][len('action_kwarg_'):])
else:
action_kwargs.append(kwargs['dest'])
kwargs['dest'] = 'action_kwarg_' + kwargs['dest']
parser.add_argument(*args, **kwargs)
parser.set_defaults(action_fn=action_fn)
parser.set_defaults(action_kwargs=action_kwargs)
parser.add_argument('action_args', nargs='*')
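# Worked example of the dest-prefixing above (comment sketch): for a method decorated
# with @args('--project', dest='project_id'), the parser stores the parsed value under
# 'action_kwarg_project_id' while 'project_id' is recorded in action_kwargs; main()
# strips the 'action_kwarg_' prefix again, so the method receives the keyword argument
# 'project_id' as expected.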
category_opt = cfg.SubCommandOpt('category',
title='Command categories',
help='Available categories',
handler=add_command_parsers)
def main():
"""Parse options and call the appropriate class/method."""
CONF.register_cli_opt(category_opt)
try:
config.parse_args(sys.argv)
logging.setup("nova")
except cfg.ConfigFilesNotFoundError:
cfgfile = CONF.config_file[-1] if CONF.config_file else None
if cfgfile and not os.access(cfgfile, os.R_OK):
st = os.stat(cfgfile)
print _("Could not read %s. Re-running with sudo") % cfgfile
try:
os.execvp('sudo', ['sudo', '-u', '#%s' % st.st_uid] + sys.argv)
except Exception:
print _('sudo failed, continuing as if nothing happened')
print _('Please re-run nova-manage as root.')
return(2)
if CONF.category.name == "version":
print version.version_string_with_package()
return(0)
if CONF.category.name == "bash-completion":
if not CONF.category.query_category:
print " ".join(CATEGORIES.keys())
elif CONF.category.query_category in CATEGORIES:
fn = CATEGORIES[CONF.category.query_category]
command_object = fn()
actions = methods_of(command_object)
print " ".join([k for (k, v) in actions])
return(0)
fn = CONF.category.action_fn
fn_args = [arg.decode('utf-8') for arg in CONF.category.action_args]
fn_kwargs = {}
for k in CONF.category.action_kwargs:
v = getattr(CONF.category, 'action_kwarg_' + k)
if v is None:
continue
if isinstance(v, basestring):
v = v.decode('utf-8')
fn_kwargs[k] = v
# call the action with the remaining arguments
# check arguments
try:
cliutils.validate_args(fn, *fn_args, **fn_kwargs)
except cliutils.MissingArgs as e:
# NOTE(mikal): this isn't the most helpful error message ever. It is
# long, and tells you a lot of things you probably don't want to know
# if you just got a single arg wrong.
print fn.__doc__
CONF.print_help()
print e
return(1)
try:
ret = fn(*fn_args, **fn_kwargs)
rpc.cleanup()
return(ret)
except Exception:
print _("Command failed, please check log for more info")
raise
| {
"content_hash": "7d31ba86fd0e3df01751434e096b8b5a",
"timestamp": "",
"source": "github",
"line_count": 1289,
"max_line_length": 79,
"avg_line_length": 39.88130333591932,
"alnum_prop": 0.5206489388604665,
"repo_name": "Brocade-OpenSource/OpenStack-DNRM-Nova",
"id": "e9d50dfe31366b8ee8e68cef1ef09ca51c1a25a5",
"size": "53867",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/cmd/manage.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "11515074"
},
{
"name": "Shell",
"bytes": "17148"
}
],
"symlink_target": ""
} |
"""
File : generic.py
Author : Bjorn Barrefors <bjorn dot peter dot barrefors AT cern dot ch>
Description: Generic service module
"""
# system modules
import logging
# package modules
from cuadrnt.utils.web_utils import get_secure_data
from cuadrnt.utils.web_utils import get_data
from cuadrnt.data_management.core.storage import StorageManager
class GenericService(object):
"""
Generic cuadrnt service class
Shared properties between services:
Contact a web service using a base url and some key:value parameters
Services require a valid cert and key
Want to cache results in a document-oriented database
"""
def __init__(self, config=dict()):
self.logger = logging.getLogger(__name__)
self.config = config
self.storage = StorageManager(self.config)
self.SERVICE = 'generic'
self.TARGET_URL = ''
def fetch(self, api, params=dict(), method='get', secure=True, cache=True, cache_only=False, force_cache=False):
"""
Get data from url using parameters params
If param cache is true update cache on cache miss
If param cache_only is true just update the cache, don't return any data.
Use this parameter to spawn external thread to update cache in background
"""
if cache:
json_data = dict()
if not force_cache:
json_data = self.storage.get_cache(self.SERVICE, api, params)
if not json_data:
if secure:
json_data = get_secure_data(target_url=self.TARGET_URL, api=api, params=params, method=method)
else:
json_data = get_data(target_url=self.TARGET_URL, api=api, file_=params)
if type(json_data) is not dict:
json_data = {'data':json_data}
self.storage.insert_cache(self.SERVICE, api, params, json_data)
if not cache_only:
return json_data
else:
if secure:
json_data = get_secure_data(target_url=self.TARGET_URL, api=api, params=params, method=method)
else:
json_data = get_data(target_url=self.TARGET_URL, api=api, file_=params)
if type(json_data) is not dict:
json_data = {'data':json_data}
return json_data
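# Illustrative subclass sketch (comment only; the service name, URL and API used below
# are placeholders, not part of cuadrnt):
#
#   class ExampleService(GenericService):
#       def __init__(self, config=dict()):
#           GenericService.__init__(self, config)
#           self.SERVICE = 'example'
#           self.TARGET_URL = 'https://example.invalid/api'
#
# ExampleService(config).fetch(api='status', params={'node': 'X'}) would then contact
# https://example.invalid/api on a cache miss and store the JSON reply in the document
# cache keyed on (service, api, params).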
| {
"content_hash": "fcb29be3370b670e53a3b069c0a6ec0b",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 116,
"avg_line_length": 40.93103448275862,
"alnum_prop": 0.6112047177759057,
"repo_name": "vlimant/IntelROCCS",
"id": "1f9e7ff4daa4c90bc44835968850754b227bbbec",
"size": "2399",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "CUADRnT/src/python/cuadrnt/data_management/services/generic.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5144"
},
{
"name": "CSS",
"bytes": "824"
},
{
"name": "HTML",
"bytes": "29707"
},
{
"name": "JavaScript",
"bytes": "6131"
},
{
"name": "Python",
"bytes": "614274"
},
{
"name": "Shell",
"bytes": "37942"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import sys, os
from . import common
from . import engine
from proton import *
from .common import pump, Skipped
from proton._compat import str2bin
def _sslCertpath(file):
""" Return the full path to the certificate,keyfile, etc.
"""
return os.path.join(os.path.dirname(__file__),
"ssl_db/%s" % file)
def _testSaslMech(self, mech, clientUser='user@proton', authUser='user@proton', encrypted=False, authenticated=True):
self.s1.allowed_mechs(mech)
self.c1.open()
self.c2.open()
pump(self.t1, self.t2, 1024)
if encrypted is not None:
assert self.t2.encrypted == encrypted, encrypted
assert self.t1.encrypted == encrypted, encrypted
assert self.t2.authenticated == authenticated, authenticated
assert self.t1.authenticated == authenticated, authenticated
if authenticated:
# Server
assert self.t2.user == authUser
assert self.s2.user == authUser
assert self.s2.mech == mech.strip()
assert self.s2.outcome == SASL.OK, self.s2.outcome
assert self.c2.state & Endpoint.LOCAL_ACTIVE and self.c2.state & Endpoint.REMOTE_ACTIVE,\
"local_active=%s, remote_active=%s" % (self.c1.state & Endpoint.LOCAL_ACTIVE, self.c1.state & Endpoint.REMOTE_ACTIVE)
# Client
assert self.t1.user == clientUser
assert self.s1.user == clientUser
assert self.s1.mech == mech.strip()
assert self.s1.outcome == SASL.OK, self.s1.outcome
assert self.c1.state & Endpoint.LOCAL_ACTIVE and self.c1.state & Endpoint.REMOTE_ACTIVE,\
"local_active=%s, remote_active=%s" % (self.c1.state & Endpoint.LOCAL_ACTIVE, self.c1.state & Endpoint.REMOTE_ACTIVE)
else:
# Server
assert self.t2.user == None
assert self.s2.user == None
assert self.s2.outcome != SASL.OK, self.s2.outcome
# Client
assert self.t1.user == clientUser
assert self.s1.user == clientUser
assert self.s1.outcome != SASL.OK, self.s1.outcome
class Test(common.Test):
pass
class SaslTest(Test):
def setUp(self):
self.t1 = Transport()
self.s1 = SASL(self.t1)
self.t2 = Transport(Transport.SERVER)
self.t2.max_frame_size = 65536
self.s2 = SASL(self.t2)
def pump(self):
pump(self.t1, self.t2, 1024)
# Note that due to server protocol autodetect, there can be no "pipelining"
    # of protocol frames from the server end, only from the client end.
#
# This is because the server cannot know which protocol layers are active
# and therefore which headers need to be sent,
# until it sees the respective protocol headers from the client.
def testPipelinedClient(self):
if "java" in sys.platform:
raise Skipped("Proton-J does not support client pipelining")
# Client
self.s1.allowed_mechs('ANONYMOUS')
# Server
self.s2.allowed_mechs('ANONYMOUS')
assert self.s1.outcome is None
assert self.s2.outcome is None
# Push client bytes into server
out1 = self.t1.peek(1024)
self.t1.pop(len(out1))
self.t2.push(out1)
out2 = self.t2.peek(1024)
self.t2.pop(len(out2))
assert self.s1.outcome is None
self.t1.push(out2)
assert self.s1.outcome == SASL.OK
assert self.s2.outcome == SASL.OK
def testPipelinedClientFail(self):
if "java" in sys.platform:
raise Skipped("Proton-J does not support client pipelining")
# Client
self.s1.allowed_mechs('ANONYMOUS')
# Server
self.s2.allowed_mechs('PLAIN DIGEST-MD5 SCRAM-SHA-1')
assert self.s1.outcome is None
assert self.s2.outcome is None
# Push client bytes into server
out1 = self.t1.peek(1024)
self.t1.pop(len(out1))
self.t2.push(out1)
out2 = self.t2.peek(1024)
self.t2.pop(len(out2))
assert self.s1.outcome is None
self.t1.push(out2)
assert self.s1.outcome == SASL.AUTH
assert self.s2.outcome == SASL.AUTH
def testSaslAndAmqpInSingleChunk(self):
if "java" in sys.platform:
raise Skipped("Proton-J does not support client pipelining")
self.s1.allowed_mechs('ANONYMOUS')
self.s2.allowed_mechs('ANONYMOUS')
# do some work to generate AMQP data
c1 = Connection()
c2 = Connection()
self.t1.bind(c1)
c1._transport = self.t1
self.t2.bind(c2)
c2._transport = self.t2
c1.open()
# get all t1's output in one buffer then pass it all to t2
out1_sasl_and_amqp = str2bin("")
t1_still_producing = True
while t1_still_producing:
out1 = self.t1.peek(1024)
self.t1.pop(len(out1))
out1_sasl_and_amqp += out1
t1_still_producing = out1
t2_still_consuming = True
while t2_still_consuming:
num = min(self.t2.capacity(), len(out1_sasl_and_amqp))
self.t2.push(out1_sasl_and_amqp[:num])
out1_sasl_and_amqp = out1_sasl_and_amqp[num:]
t2_still_consuming = num > 0 and len(out1_sasl_and_amqp) > 0
assert len(out1_sasl_and_amqp) == 0, (len(out1_sasl_and_amqp), out1_sasl_and_amqp)
# check that t2 processed both the SASL data and the AMQP data
assert self.s2.outcome == SASL.OK
assert c2.state & Endpoint.REMOTE_ACTIVE
def testPipelined2(self):
if "java" in sys.platform:
raise Skipped("Proton-J does not support client pipelining")
out1 = self.t1.peek(1024)
self.t1.pop(len(out1))
self.t2.push(out1)
self.s2.allowed_mechs('ANONYMOUS')
c2 = Connection()
c2.open()
self.t2.bind(c2)
out2 = self.t2.peek(1024)
self.t2.pop(len(out2))
self.t1.push(out2)
out1 = self.t1.peek(1024)
assert len(out1) > 0
def testFracturedSASL(self):
""" PROTON-235
"""
assert self.s1.outcome is None
# self.t1.trace(Transport.TRACE_FRM)
out = self.t1.peek(1024)
self.t1.pop(len(out))
self.t1.push(str2bin("AMQP\x03\x01\x00\x00"))
out = self.t1.peek(1024)
self.t1.pop(len(out))
self.t1.push(str2bin("\x00\x00\x00"))
out = self.t1.peek(1024)
self.t1.pop(len(out))
self.t1.push(str2bin("6\x02\x01\x00\x00\x00S@\xc04\x01\xe01\x04\xa3\x05PLAIN\x0aDIGEST-MD5\x09ANONYMOUS\x08CRAM-MD5"))
out = self.t1.peek(1024)
self.t1.pop(len(out))
self.t1.push(str2bin("\x00\x00\x00\x10\x02\x01\x00\x00\x00SD\xc0\x03\x01P\x00"))
out = self.t1.peek(1024)
self.t1.pop(len(out))
while out:
out = self.t1.peek(1024)
self.t1.pop(len(out))
assert self.s1.outcome == SASL.OK, self.s1.outcome
def test_singleton(self):
"""Verify that only a single instance of SASL can exist per Transport"""
transport = Transport()
attr = object()
sasl1 = SASL(transport)
sasl1.my_attribute = attr
sasl2 = transport.sasl()
sasl3 = SASL(transport)
assert sasl1 == sasl2
assert sasl1 == sasl3
assert sasl1.my_attribute == attr
assert sasl2.my_attribute == attr
assert sasl3.my_attribute == attr
transport = Transport()
sasl1 = transport.sasl()
sasl1.my_attribute = attr
sasl2 = SASL(transport)
assert sasl1 == sasl2
assert sasl1.my_attribute == attr
assert sasl2.my_attribute == attr
def testSaslSkipped(self):
"""Verify that the server (with SASL) correctly handles a client without SASL"""
self.t1 = Transport()
self.t2.require_auth(False)
self.pump()
assert self.s2.outcome == None
assert self.t2.condition == None
assert self.t2.authenticated == False
assert self.s1.outcome == None
assert self.t1.condition == None
assert self.t1.authenticated == False
def testSaslSkippedFail(self):
"""Verify that the server (with SASL) correctly handles a client without SASL"""
self.t1 = Transport()
self.t2.require_auth(True)
self.pump()
assert self.s2.outcome == None
assert self.t2.condition != None
assert self.s1.outcome == None
assert self.t1.condition != None
def testMechNotFound(self):
if "java" in sys.platform:
raise Skipped("Proton-J does not support checking authentication state")
self.c1 = Connection()
self.c1.open()
self.t1.bind(self.c1)
self.s1.allowed_mechs('IMPOSSIBLE')
self.pump()
assert self.t2.authenticated == False
assert self.t1.authenticated == False
assert self.s1.outcome != SASL.OK
assert self.s2.outcome != SASL.OK
class SASLMechTest(Test):
def setUp(self):
self.t1 = Transport()
self.s1 = SASL(self.t1)
self.t2 = Transport(Transport.SERVER)
self.s2 = SASL(self.t2)
self.c1 = Connection()
self.c1.user = 'user@proton'
self.c1.password = 'password'
self.c1.hostname = 'localhost'
self.c2 = Connection()
def testANON(self):
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'ANONYMOUS', authUser='anonymous')
def testCRAMMD5(self):
common.ensureCanTestExtendedSASL()
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'CRAM-MD5')
def testDIGESTMD5(self):
common.ensureCanTestExtendedSASL()
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'DIGEST-MD5')
# PLAIN shouldn't work without encryption unless specially configured
def testPLAINfail(self):
common.ensureCanTestExtendedSASL()
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'PLAIN', authenticated=False)
# Client won't accept PLAIN, even if the server offers it, without special setting
def testPLAINClientFail(self):
common.ensureCanTestExtendedSASL()
self.s2.allow_insecure_mechs = True
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'PLAIN', authenticated=False)
# PLAIN will only work if both ends are specially set up
def testPLAIN(self):
common.ensureCanTestExtendedSASL()
self.s1.allow_insecure_mechs = True
self.s2.allow_insecure_mechs = True
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'PLAIN')
# SCRAM is not supported before Cyrus SASL 2.1.26,
# so it is not universal and hence we would need a support check
# before keeping it in the tests.
# def testSCRAMSHA1(self):
# common.ensureCanTestExtendedSASL()
#
# self.t1.bind(self.c1)
# self.t2.bind(self.c2)
# _testSaslMech(self, 'SCRAM-SHA-1')
def _sslConnection(domain, transport, connection):
transport.bind(connection)
ssl = SSL(transport, domain, None )
return connection
class SSLSASLTest(Test):
def setUp(self):
if not common.isSSLPresent():
raise Skipped("No SSL libraries found.")
self.server_domain = SSLDomain(SSLDomain.MODE_SERVER)
self.client_domain = SSLDomain(SSLDomain.MODE_CLIENT)
self.t1 = Transport()
self.s1 = SASL(self.t1)
self.t2 = Transport(Transport.SERVER)
self.s2 = SASL(self.t2)
self.c1 = Connection()
self.c2 = Connection()
def testSSLPlainSimple(self):
if "java" in sys.platform:
raise Skipped("Proton-J does not support SSL with SASL")
if not SASL.extended():
raise Skipped("Simple SASL server does not support PLAIN")
common.ensureCanTestExtendedSASL()
clientUser = 'user@proton'
mech = 'PLAIN'
self.c1.user = clientUser
self.c1.password = 'password'
self.c1.hostname = 'localhost'
ssl1 = _sslConnection(self.client_domain, self.t1, self.c1)
ssl2 = _sslConnection(self.server_domain, self.t2, self.c2)
_testSaslMech(self, mech, encrypted=True)
def testSSLPlainSimpleFail(self):
if "java" in sys.platform:
raise Skipped("Proton-J does not support SSL with SASL")
if not SASL.extended():
raise Skipped("Simple SASL server does not support PLAIN")
common.ensureCanTestExtendedSASL()
clientUser = 'usr@proton'
mech = 'PLAIN'
self.c1.user = clientUser
self.c1.password = 'password'
self.c1.hostname = 'localhost'
ssl1 = _sslConnection(self.client_domain, self.t1, self.c1)
ssl2 = _sslConnection(self.server_domain, self.t2, self.c2)
_testSaslMech(self, mech, clientUser='usr@proton', encrypted=True, authenticated=False)
def testSSLExternalSimple(self):
if "java" in sys.platform:
raise Skipped("Proton-J does not support SSL with SASL")
extUser = 'O=Client,CN=127.0.0.1'
mech = 'EXTERNAL'
self.server_domain.set_credentials(_sslCertpath("server-certificate.pem"),
_sslCertpath("server-private-key.pem"),
"server-password")
self.server_domain.set_trusted_ca_db(_sslCertpath("ca-certificate.pem"))
self.server_domain.set_peer_authentication(SSLDomain.VERIFY_PEER,
_sslCertpath("ca-certificate.pem") )
self.client_domain.set_credentials(_sslCertpath("client-certificate.pem"),
_sslCertpath("client-private-key.pem"),
"client-password")
self.client_domain.set_trusted_ca_db(_sslCertpath("ca-certificate.pem"))
self.client_domain.set_peer_authentication(SSLDomain.VERIFY_PEER)
ssl1 = _sslConnection(self.client_domain, self.t1, self.c1)
ssl2 = _sslConnection(self.server_domain, self.t2, self.c2)
_testSaslMech(self, mech, clientUser=None, authUser=extUser, encrypted=True)
def testSSLExternalSimpleFail(self):
if "java" in sys.platform:
raise Skipped("Proton-J does not support SSL with SASL")
mech = 'EXTERNAL'
self.server_domain.set_credentials(_sslCertpath("server-certificate.pem"),
_sslCertpath("server-private-key.pem"),
"server-password")
self.server_domain.set_trusted_ca_db(_sslCertpath("ca-certificate.pem"))
self.server_domain.set_peer_authentication(SSLDomain.VERIFY_PEER,
_sslCertpath("ca-certificate.pem") )
self.client_domain.set_trusted_ca_db(_sslCertpath("ca-certificate.pem"))
self.client_domain.set_peer_authentication(SSLDomain.VERIFY_PEER)
ssl1 = _sslConnection(self.client_domain, self.t1, self.c1)
ssl2 = _sslConnection(self.server_domain, self.t2, self.c2)
_testSaslMech(self, mech, clientUser=None, authUser=None, encrypted=None, authenticated=False)
class SASLEventTest(engine.CollectorTest):
def setUp(self):
engine.CollectorTest.setUp(self)
self.t1 = Transport()
self.s1 = SASL(self.t1)
self.t2 = Transport(Transport.SERVER)
self.s2 = SASL(self.t2)
self.c1 = Connection()
self.c1.user = 'user@proton'
self.c1.password = 'password'
self.c1.hostname = 'localhost'
self.c2 = Connection()
self.collector = Collector()
def testNormalAuthenticationClient(self):
common.ensureCanTestExtendedSASL()
self.c1.collect(self.collector)
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'DIGEST-MD5')
self.expect(Event.CONNECTION_INIT, Event.CONNECTION_BOUND,
Event.CONNECTION_LOCAL_OPEN, Event.TRANSPORT,
Event.CONNECTION_REMOTE_OPEN)
def testNormalAuthenticationServer(self):
common.ensureCanTestExtendedSASL()
self.c2.collect(self.collector)
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'DIGEST-MD5')
self.expect(Event.CONNECTION_INIT, Event.CONNECTION_BOUND,
Event.CONNECTION_LOCAL_OPEN, Event.TRANSPORT,
Event.CONNECTION_REMOTE_OPEN)
def testFailedAuthenticationClient(self):
common.ensureCanTestExtendedSASL()
clientUser = "usr@proton"
self.c1.user = clientUser
self.c1.collect(self.collector)
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'DIGEST-MD5', clientUser=clientUser, authenticated=False)
self.expect(Event.CONNECTION_INIT, Event.CONNECTION_BOUND,
Event.CONNECTION_LOCAL_OPEN, Event.TRANSPORT,
Event.TRANSPORT_ERROR,
Event.TRANSPORT_TAIL_CLOSED,
Event.TRANSPORT_HEAD_CLOSED, Event.TRANSPORT_CLOSED)
def testFailedAuthenticationServer(self):
common.ensureCanTestExtendedSASL()
clientUser = "usr@proton"
self.c1.user = clientUser
self.c2.collect(self.collector)
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'DIGEST-MD5', clientUser=clientUser, authenticated=False)
self.expect(Event.CONNECTION_INIT, Event.CONNECTION_BOUND,
Event.CONNECTION_LOCAL_OPEN, Event.TRANSPORT,
Event.TRANSPORT_ERROR,
Event.TRANSPORT_TAIL_CLOSED,
Event.TRANSPORT_HEAD_CLOSED, Event.TRANSPORT_CLOSED)
def testNoMechClient(self):
common.ensureCanTestExtendedSASL()
self.c1.collect(self.collector)
self.s2.allowed_mechs('IMPOSSIBLE')
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'DIGEST-MD5', authenticated=False)
self.expect(Event.CONNECTION_INIT, Event.CONNECTION_BOUND,
Event.CONNECTION_LOCAL_OPEN, Event.TRANSPORT,
Event.TRANSPORT_ERROR,
Event.TRANSPORT_TAIL_CLOSED, Event.TRANSPORT_HEAD_CLOSED, Event.TRANSPORT_CLOSED)
def testNoMechServer(self):
common.ensureCanTestExtendedSASL()
self.c2.collect(self.collector)
self.s2.allowed_mechs('IMPOSSIBLE')
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'DIGEST-MD5', authenticated=False)
self.expect(Event.CONNECTION_INIT, Event.CONNECTION_BOUND,
Event.CONNECTION_LOCAL_OPEN, Event.TRANSPORT,
Event.TRANSPORT_TAIL_CLOSED,
Event.TRANSPORT_ERROR, Event.TRANSPORT_HEAD_CLOSED, Event.TRANSPORT_CLOSED)
def testDisallowedMechClient(self):
self.c1.collect(self.collector)
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'IMPOSSIBLE', authenticated=False)
self.expect(Event.CONNECTION_INIT, Event.CONNECTION_BOUND,
Event.CONNECTION_LOCAL_OPEN, Event.TRANSPORT,
Event.TRANSPORT_ERROR,
Event.TRANSPORT_TAIL_CLOSED, Event.TRANSPORT_HEAD_CLOSED, Event.TRANSPORT_CLOSED)
def testDisallowedMechServer(self):
self.c2.collect(self.collector)
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'IMPOSSIBLE', authenticated=False)
self.expect(Event.CONNECTION_INIT, Event.CONNECTION_BOUND,
Event.CONNECTION_LOCAL_OPEN, Event.TRANSPORT,
Event.TRANSPORT_TAIL_CLOSED,
Event.TRANSPORT_ERROR, Event.TRANSPORT_HEAD_CLOSED, Event.TRANSPORT_CLOSED)
def testDisallowedPlainClient(self):
self.c1.collect(self.collector)
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'PLAIN', authenticated=False)
self.expect(Event.CONNECTION_INIT, Event.CONNECTION_BOUND,
Event.CONNECTION_LOCAL_OPEN, Event.TRANSPORT,
Event.TRANSPORT_ERROR,
Event.TRANSPORT_TAIL_CLOSED, Event.TRANSPORT_HEAD_CLOSED, Event.TRANSPORT_CLOSED)
def testDisallowedPlainServer(self):
self.c2.collect(self.collector)
self.t1.bind(self.c1)
self.t2.bind(self.c2)
_testSaslMech(self, 'PLAIN', authenticated=False)
self.expect(Event.CONNECTION_INIT, Event.CONNECTION_BOUND,
Event.CONNECTION_LOCAL_OPEN, Event.TRANSPORT,
Event.TRANSPORT_TAIL_CLOSED,
Event.TRANSPORT_ERROR, Event.TRANSPORT_HEAD_CLOSED, Event.TRANSPORT_CLOSED)
| {
"content_hash": "ac30d12a195e1805b5e9a9b5a8a6073d",
"timestamp": "",
"source": "github",
"line_count": 575,
"max_line_length": 123,
"avg_line_length": 33.563478260869566,
"alnum_prop": 0.6687911290740453,
"repo_name": "bozzzzo/qpid-proton",
"id": "75b4828c9729f9504ba52ed6756d7088a96ba0b8",
"size": "20090",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/python/proton_tests/sasl.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1234902"
},
{
"name": "C++",
"bytes": "312016"
},
{
"name": "CMake",
"bytes": "86437"
},
{
"name": "Groff",
"bytes": "420"
},
{
"name": "HTML",
"bytes": "8169"
},
{
"name": "Java",
"bytes": "1798487"
},
{
"name": "JavaScript",
"bytes": "244212"
},
{
"name": "PHP",
"bytes": "31076"
},
{
"name": "Perl",
"bytes": "100876"
},
{
"name": "Perl6",
"bytes": "878"
},
{
"name": "Python",
"bytes": "628476"
},
{
"name": "Ruby",
"bytes": "335237"
},
{
"name": "Shell",
"bytes": "10889"
}
],
"symlink_target": ""
} |
import sys
def run_once():
'''
A helper that takes one trip through the event-loop to process any
pending Futures. This is a no-op for Twisted, because you don't
need to use the event-loop to get callbacks to happen in Twisted.
'''
import txaio
if txaio.using_twisted:
return
try:
import asyncio
if sys.version_info >= (3, 7):
# https://github.com/crossbario/txaio/issues/139
from _asyncio_test_utils import run_once as _run_once
else:
from asyncio.test_utils import run_once as _run_once
return _run_once(txaio.config.loop or asyncio.get_event_loop())
except ImportError:
import trollius as asyncio
# let any trollius import error out; if we're not using
# twisted, and have no asyncio *and* no trollius, that's a
# problem.
# copied from asyncio.test_utils because trollius has no
# test_utils
# just like modern asyncio.test_utils.run_once does it...
loop = asyncio.get_event_loop()
loop.stop()
loop.run_forever()
asyncio.gather(*asyncio.Task.all_tasks())
def _await(future):
'''
Essentially just a way to call "run_until_complete" that becomes a
no-op if we're using Twisted.
'''
import txaio
if txaio.using_twisted:
return
try:
import asyncio
except ImportError:
import trollius as asyncio
asyncio.get_event_loop().run_until_complete(future)
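# ---------------------------------------------------------------------------
# Illustrative usage (not part of the original module): in a test that must
# work under both Twisted and asyncio, these helpers can be combined roughly
# like this; the future below is a placeholder created by the test itself.
#
#   f = txaio.create_future()
#   txaio.resolve(f, 42)
#   run_once()   # let the asyncio loop deliver callbacks (no-op on Twisted)
#   _await(f)    # or drive the loop until the future completes
# ---------------------------------------------------------------------------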
| {
"content_hash": "e410de83f849b76bc6217ea77794abce",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 71,
"avg_line_length": 27.581818181818182,
"alnum_prop": 0.6242584047462096,
"repo_name": "crossbario/txaio",
"id": "9f56778c7d295e8b67ec0e18475b2fd27adc734f",
"size": "2811",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "test/util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1285"
},
{
"name": "Python",
"bytes": "146564"
},
{
"name": "Shell",
"bytes": "2121"
}
],
"symlink_target": ""
} |
import os
from pathlib import Path
import json
def up(config, database, semester, course):
course_dir = Path(config.submitty['submitty_data_dir'], 'courses', semester, course)
config_file = Path(course_dir, 'config', 'config.json')
if config_file.is_file():
j = json.load(open(config_file,'r'))
if 'seating_only_for_instructor' not in j['course_details']:
j['course_details']['seating_only_for_instructor'] = False
json.dump(j,open(config_file,'w'))
def down(config, database, semester, course):
pass
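# Illustrative effect (assuming a typical course config.json): after `up`
# runs, the course_details block gains a default entry such as
#   "course_details": { ..., "seating_only_for_instructor": false }
# `down` is intentionally a no-op, so the key is left in place on rollback.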
| {
"content_hash": "d44b74dd88e6f3c824cd6d7aea0e799c",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 88,
"avg_line_length": 29.42105263157895,
"alnum_prop": 0.6565295169946332,
"repo_name": "Submitty/Submitty",
"id": "6e00cf7292144f43eabf28ebd4373044dac65831",
"size": "559",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "migration/migrator/migrations/course/20190311155549_seating_instructor_only.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "8450"
},
{
"name": "C++",
"bytes": "496998"
},
{
"name": "CMake",
"bytes": "1561"
},
{
"name": "CSS",
"bytes": "210295"
},
{
"name": "HTML",
"bytes": "799796"
},
{
"name": "Java",
"bytes": "3828"
},
{
"name": "JavaScript",
"bytes": "981630"
},
{
"name": "PHP",
"bytes": "3103857"
},
{
"name": "PLpgSQL",
"bytes": "122825"
},
{
"name": "Python",
"bytes": "1589891"
},
{
"name": "Shell",
"bytes": "205161"
},
{
"name": "TeX",
"bytes": "21960"
},
{
"name": "Twig",
"bytes": "1239136"
},
{
"name": "TypeScript",
"bytes": "17328"
}
],
"symlink_target": ""
} |
import time
import tensorflow as tf
import ffn
from ffn import *
if __name__ == '__main__':
minibatchSize = 1024
program_start_time = time.time()
# Create the model
if (FLAGS.noInputFeed):
features, labels = getFakeMinibatch(minibatchSize)
else:
features = tf.placeholder("float", [None, featureDim])
labels = tf.placeholder("float", [None, labelDim])
crossEntropy, accuracy = getLossAndAccuracyForSubBatch(features, labels)
trainStep = tf.train.GradientDescentOptimizer(0.01).minimize(crossEntropy)
# Train
sess = tf.Session(config=tf.ConfigProto(log_device_placement=FLAGS.logDevicePlacement))
init = tf.initialize_all_variables()
sess.run(init)
perMinibatchTime = []
for i in range(numMinibatches):
if (FLAGS.noInputFeed == False):
minibatchFeatures, minibatchLabels = getFakeMinibatch(minibatchSize)
startTime = time.time()
if (FLAGS.noInputFeed):
sess.run([trainStep, accuracy])
else:
sess.run([trainStep, accuracy], feed_dict={features: minibatchFeatures, labels: minibatchLabels})
currMinibatchDuration = time.time() - startTime
perMinibatchTime.append(currMinibatchDuration)
printTrainingStats(1, minibatchSize, perMinibatchTime)
program_end_time = time.time()
print('Program finished, Total seconds: %s' % (program_end_time - program_start_time))
| {
"content_hash": "7353241db253d77c6a4b66d7d559c06a",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 105,
"avg_line_length": 32.86363636363637,
"alnum_prop": 0.6784232365145229,
"repo_name": "linmajia/dlbench",
"id": "2ed7ba401c476a9eb10ca249c59cbdf1be03535b",
"size": "1516",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "synthetic/experiments/tensorflow/fc/ffn_exp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Lua",
"bytes": "122057"
},
{
"name": "PowerShell",
"bytes": "1152"
},
{
"name": "Python",
"bytes": "252271"
},
{
"name": "Shell",
"bytes": "87021"
}
],
"symlink_target": ""
} |
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
import dson
from dson._compact import long
def test_dump():
sio = StringIO()
dson.dump({}, sio)
assert sio.getvalue() == 'such wow'
def test_dumps():
assert dson.dumps({}) == 'such wow'
def test_encode_truefalse():
assert dson.dumps({True: False, False: True}, sort_keys=True) == \
'such "no" is yes, "yes" is no wow'
assert dson.dumps(
{2: 3.0, 4.0: long(5), False: 1, long(6): True}, sort_keys=True) == \
'such "no" is 1, "2" is 3.0, "4.0" is 5, "6" is yes wow'
# Issue 16228: Crash on encoding resized list
def test_encode_mutated():
a = [object()] * 10
def crasher(obj):
del a[-1]
assert dson.dumps(a, default=crasher) == 'so empty and empty and empty and empty and empty many'
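# For reference (derived only from the assertions above, not from a spec):
# dicts serialise as `such ... wow` with `is` between key and value and `,`
# between pairs -- e.g. dson.dumps({'foo': True}) would give
# 'such "foo" is yes wow' -- while lists use `so ... many` with `and`
# separating the items.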
| {
"content_hash": "6e48cbd4dcb43c3079cec64f9e233ef1",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 100,
"avg_line_length": 29.482758620689655,
"alnum_prop": 0.6046783625730994,
"repo_name": "soasme/dogeon",
"id": "8564271d83f24843733971fef6eda9ae8af7e281",
"size": "855",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_dump.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "76234"
}
],
"symlink_target": ""
} |
import os.path
import re # for folderfilter
from threading import Lock
boxes = {}
localroots = {}
config = None
accounts = None
mblock = Lock()
def init(conf, accts):
global config, accounts
config = conf
accounts = accts
def add(accountname, foldername, localfolders):
if not accountname in boxes:
boxes[accountname] = []
localroots[accountname] = localfolders
if not foldername in boxes[accountname]:
boxes[accountname].append(foldername)
def write(allcomplete):
incremental = config.getdefaultboolean("mbnames", "incremental", False)
# Skip writing if we don't want incremental writing and we're not done.
if not incremental and not allcomplete:
return
# Skip writing if we want incremental writing and we're done.
if incremental and allcomplete:
return
# See if we're ready to write it out.
for account in accounts:
if account not in boxes:
return
__genmbnames()
def __genmbnames():
"""Takes a configparser object and a boxlist, which is a list of hashes
containing 'accountname' and 'foldername' keys."""
xforms = [os.path.expanduser, os.path.expandvars]
mblock.acquire()
try:
localeval = config.getlocaleval()
if not config.getdefaultboolean("mbnames", "enabled", 0):
return
path = config.apply_xforms(config.get("mbnames", "filename"), xforms)
file = open(path, "wt")
file.write(localeval.eval(config.get("mbnames", "header")))
folderfilter = lambda accountname, foldername: 1
if config.has_option("mbnames", "folderfilter"):
folderfilter = localeval.eval(config.get("mbnames", "folderfilter"),
{'re': re})
mb_sort_keyfunc = lambda d: (d['accountname'], d['foldername'])
if config.has_option("mbnames", "sort_keyfunc"):
mb_sort_keyfunc = localeval.eval(config.get("mbnames", "sort_keyfunc"),
{'re': re})
itemlist = []
for accountname in boxes.keys():
localroot = localroots[accountname]
for foldername in boxes[accountname]:
if folderfilter(accountname, foldername):
itemlist.append({'accountname': accountname,
'foldername': foldername,
'localfolders': localroot})
itemlist.sort(key = mb_sort_keyfunc)
format_string = config.get("mbnames", "peritem", raw=1)
itemlist = [format_string % d for d in itemlist]
file.write(localeval.eval(config.get("mbnames", "sep")).join(itemlist))
file.write(localeval.eval(config.get("mbnames", "footer")))
file.close()
finally:
mblock.release()
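# ---------------------------------------------------------------------------
# Illustrative configuration (not shipped with this module): the options read
# above map onto an [mbnames] section of the offlineimaprc, for example:
#
#   [mbnames]
#   enabled = yes
#   filename = ~/.mutt/mailboxes
#   header = "mailboxes "
#   peritem = "+%(accountname)s/%(foldername)s"
#   sep = " "
#   footer = "\n"
#
# Note that header, peritem, sep and footer pass through localeval.eval(),
# so they are written as Python string literals.
# ---------------------------------------------------------------------------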
| {
"content_hash": "9d0362355d533bb3bb5b43d061a43124",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 83,
"avg_line_length": 37.12987012987013,
"alnum_prop": 0.6012591815320042,
"repo_name": "frioux/offlineimap",
"id": "8829ee52a719cb79daf658ffdc145ca2ce360dbf",
"size": "3676",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "offlineimap/mbnames.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "2022"
},
{
"name": "Python",
"bytes": "543162"
},
{
"name": "Shell",
"bytes": "12224"
}
],
"symlink_target": ""
} |
import json
import os
import shutil
import zipfile
from build import cd
def create_template(name, path, **kw):
os.makedirs(os.path.join(path, 'module'))
with open(os.path.join(path, 'module', 'manifest.json'), 'w') as manifest_file:
manifest = {
"name": name,
"version": "0.1",
"description": "My module template"
}
with open(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'platform_version.txt'))) as platform_version_file:
manifest['platform_version'] = platform_version_file.read()
json.dump(manifest, manifest_file, indent=4, sort_keys=True)
# Copy template module
template_path = os.path.abspath(os.path.join(os.path.split(__file__)[0], 'templatemodule'))
for root, dirnames, filenames in os.walk(template_path):
for filename in filenames:
relative_path = os.path.join(root, filename)[len(template_path)+1:]
with open(os.path.join(root, filename), 'r') as source:
lines = source.readlines()
new_dir = os.path.split(os.path.join(path, 'module', relative_path.replace('templatemodule', name)))[0]
if not os.path.isdir(new_dir):
os.makedirs(new_dir)
with open(os.path.join(path, 'module', relative_path.replace('templatemodule', name)), 'w') as output:
for line in lines:
output.write(line.replace('templatemodule', name))
return load(path, manifest)
def load(path, manifest, **kw):
module_model = {}
module_model['local_path'] = path
module_model['module_dynamic_path'] = os.path.join(path, ".trigger", "module_dynamic")
module_model['files'] = {
'manifest': os.path.join(path, 'module', 'manifest.json'),
'module_structure': os.path.join(path, ".trigger", "schema", "module_structure.json")
}
module_model['rawfiles'] = {
'dynamic_platform_version': os.path.join(path, ".trigger", "platform_version.txt")
}
module_model['directories'] = {
'module_directory': os.path.join(path, 'module')
}
return module_model
def create_upload_zip(path, subdirs = [], **kw):
module_path = os.path.abspath(os.path.join(path, 'module'))
zip_base = os.path.abspath(os.path.join(path, '.trigger', 'upload_tmp'))
if os.path.exists(zip_base+".zip"):
os.unlink(zip_base+".zip")
if len(subdirs):
zip_path = _make_partial_archive(zip_base, subdirs, root_dir=module_path)
else:
zip_path = shutil.make_archive(zip_base, 'zip', root_dir=module_path)
return zip_path
def _make_partial_archive(zip_base, subdirs, root_dir):
zip = zipfile.ZipFile(zip_base + ".zip", "w")
with cd(root_dir):
for subdir in subdirs:
if not os.path.exists(subdir):
continue
for root, dirs, files in os.walk(subdir):
for file in files:
zip.write(os.path.join(root, file))
zip.close()
return zip_base + ".zip"
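# Illustrative result (assuming create_template(name='mymodule', path=...)):
# the generated module/manifest.json would contain roughly
#   {"name": "mymodule", "version": "0.1",
#    "description": "My module template",
#    "platform_version": <contents of platform_version.txt>}
# and every 'templatemodule' occurrence in the copied template tree (both in
# file names and file contents) is rewritten to 'mymodule'.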
| {
"content_hash": "ff17547568d6a31214417c43e4976e95",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 125,
"avg_line_length": 33.8375,
"alnum_prop": 0.680458071666051,
"repo_name": "mnaughto/trigger-statusbar",
"id": "42199445380b228895cd3b96961bebd6f0ecf74c",
"size": "2707",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": ".trigger/module_dynamic/module.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "386"
},
{
"name": "CSS",
"bytes": "116661"
},
{
"name": "D",
"bytes": "1536"
},
{
"name": "JavaScript",
"bytes": "64389"
},
{
"name": "Objective-C",
"bytes": "28595"
},
{
"name": "Python",
"bytes": "140312"
}
],
"symlink_target": ""
} |
class CorePlugin:
def __init__(self, bot):
self.bot = bot
def startup(self, config):
self.bot.registerCommand("help", self.cmd_help)
self.bot.registerCommand("h", self.cmd_help)
self.bot.registerCommand("plugins", self.cmd_plugins)
self.bot.registerCommand("password", self.cmd_password, True)
self.bot.registerCommand("login", self.cmd_login, True)
self.bot.registerCommand("reload", self.cmd_reload, True)
self.bot.registerCommand("die", self.cmd_die, True)
self.bot.registerEvent("private_message", self.event_private_message)
self.bot.registerEvent("public_message", self.event_public_message)
self.bot.registerEvent("nick_change", self.event_nick_change)
self.bot.registerEvent("user_part", self.event_user_part)
self.bot.registerEvent("user_quit", self.event_user_quit)
def shutdown(self):
pass
"""
#------------------------------------------#
# Event Handlers #
#------------------------------------------#
"""
def event_private_message(self, ev):
"""
+ person who said it: ev.source
+ their nick: ev.source.nick
+ what they said: ev.arguments[0]
"""
pass
def event_public_message(self, ev):
"""
+ person who said it: ev.source
+ their nick: ev.source.nick
+ what they said: ev.arguments[0]
"""
pass
def event_nick_change(self, ev):
"""
+ old nick: ev.source.nick
+ new nick: ev.target
"""
pass
def event_user_part(self, ev):
"""
+ person who left: ev.source.nick
"""
pass
def event_user_quit(self, ev):
"""
+ person who left: ev.source.nick
"""
pass
"""
#------------------------------------------#
# Commands #
#------------------------------------------#
"""
def cmd_help(self, issuedBy, data):
"""[command] - displays this message"""
if data == "":
pref = self.bot.prefixes[0]
for p in self.bot.plugins:
cmds = [pref + c.name + ("*" if c.password else "")
for c in p.commands]
self.bot.reply("[{}] {}".format(p.name, ", ".join(cmds)))
else:
for p in self.bot.plugins:
for c in p.commands:
if data == c.name:
self.bot.reply(
"[{}] {}{} {}".format(
p.name, self.bot.prefixes[0],
c.name, c.function.__doc__)
)
return
self.bot.reply("Command not found: " + data)
def cmd_plugins(self, issuedBy, data):
"""lists all the currently loaded plugins"""
self.bot.reply("Plugins: " +
", ".join(p.name for p in self.bot.plugins))
"""
#------------------------------------------#
# Admin Commands #
#------------------------------------------#
"""
def cmd_password(self, issuedBy, data):
"""displays the bot's password"""
self.bot.pm(issuedBy, "My password is: " + self.bot.password)
def cmd_login(self, issuedBy, data):
"""logs you in"""
# The login function is special in that it gets the full user object,
# not just the nick
host = issuedBy.host
if host not in self.bot.loggedin:
self.bot.loggedin.append(host)
self.bot.reply("{} has logged in".format(issuedBy.nick))
else:
self.bot.reply("You are already logged in")
def cmd_die(self, issuedBy, data):
"""kills the bot"""
if data:
self.bot.die("{}".format(data))
else:
self.bot.die("Leaving")
def cmd_reload(self, issuedBy, data):
"""reloads plugins"""
self.bot.loadPlugins()
| {
"content_hash": "0451c1fdf46b31472426fe6a3c9d7885",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 77,
"avg_line_length": 32.0078125,
"alnum_prop": 0.4710763973639248,
"repo_name": "anthonynguyen/basebot",
"id": "11f528c1c5eb31441687882e9c79c0400b7152bd",
"size": "4097",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "basebot/core.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19879"
}
],
"symlink_target": ""
} |
from . import cli
cli.main()
| {
"content_hash": "3302e2c8a807d8aa7d06976e3634839c",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 17,
"avg_line_length": 7.75,
"alnum_prop": 0.6451612903225806,
"repo_name": "winny-/secret-manager",
"id": "88a04c40450257094b6cb5507f567f3306671d7d",
"size": "31",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "secret_manager/__main__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7634"
}
],
"symlink_target": ""
} |
"""Test error messages for 'getaddressinfo' and 'validateaddress' RPC commands."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_raises_rpc_error
BECH32_VALID = 'rltc1qhku5rq7jz8ulufe2y6fkcpnlvpsta7rqtc0r66'
BECH32_INVALID_BECH32 = 'rltc1p0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vqdmchcc'
BECH32_INVALID_BECH32M = 'rltc1qw508d6qejxtdg4y5r3zarvary0c5xw7k35mrzd'
BECH32_INVALID_VERSION = 'rltc130xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vqynjegk'
BECH32_INVALID_SIZE = 'rltc1s0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7v8n0nx0muaewav25430mtr'
BECH32_INVALID_V0_SIZE = 'rltc1qw508d6qejxtdg4y5r3zarvary0c5xw7kqqq5k3my'
BECH32_INVALID_PREFIX = 'bc1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7k7grplx'
BASE58_VALID = 'mipcBbFg9gMiCh81Kj8tqqdgoZub1ZJRfn'
BASE58_INVALID_PREFIX = '17VZNX1SN5NtKa8UQFxwQbFeFc3iqRYhem'
INVALID_ADDRESS = 'asfah14i8fajz0123f'
class InvalidAddressErrorMessageTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def test_validateaddress(self):
node = self.nodes[0]
# Bech32
info = node.validateaddress(BECH32_INVALID_SIZE)
assert not info['isvalid']
info = node.validateaddress(BECH32_INVALID_PREFIX)
assert not info['isvalid']
info = node.validateaddress(BECH32_INVALID_BECH32)
assert not info['isvalid']
info = node.validateaddress(BECH32_INVALID_BECH32M)
assert not info['isvalid']
info = node.validateaddress(BECH32_INVALID_V0_SIZE)
assert not info['isvalid']
info = node.validateaddress(BECH32_VALID)
assert info['isvalid']
assert 'error' not in info
# Base58
info = node.validateaddress(BASE58_INVALID_PREFIX)
assert not info['isvalid']
info = node.validateaddress(BASE58_VALID)
assert info['isvalid']
assert 'error' not in info
# Invalid address format
info = node.validateaddress(INVALID_ADDRESS)
assert not info['isvalid']
def test_getaddressinfo(self):
node = self.nodes[0]
assert_raises_rpc_error(-5, "Invalid address", node.getaddressinfo, BECH32_INVALID_SIZE)
assert_raises_rpc_error(-5, "Invalid address", node.getaddressinfo, BECH32_INVALID_PREFIX)
assert_raises_rpc_error(-5, "Invalid address", node.getaddressinfo, BASE58_INVALID_PREFIX)
assert_raises_rpc_error(-5, "Invalid address", node.getaddressinfo, INVALID_ADDRESS)
def run_test(self):
self.test_validateaddress()
self.test_getaddressinfo()
if __name__ == '__main__':
InvalidAddressErrorMessageTest().main()
| {
"content_hash": "27752367a62eb35b976c3ea526711b4a",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 102,
"avg_line_length": 35.5,
"alnum_prop": 0.725,
"repo_name": "litecoin-project/litecoin",
"id": "1786161ffa597d83dce06dd75a91b84f826e4580",
"size": "3049",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/functional/rpc_invalid_address_message.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "898000"
},
{
"name": "C",
"bytes": "1594708"
},
{
"name": "C++",
"bytes": "8860047"
},
{
"name": "CMake",
"bytes": "29310"
},
{
"name": "HTML",
"bytes": "21833"
},
{
"name": "Java",
"bytes": "30291"
},
{
"name": "M4",
"bytes": "226003"
},
{
"name": "Makefile",
"bytes": "123607"
},
{
"name": "Objective-C++",
"bytes": "5489"
},
{
"name": "Python",
"bytes": "2267056"
},
{
"name": "QMake",
"bytes": "798"
},
{
"name": "Sage",
"bytes": "31382"
},
{
"name": "Scheme",
"bytes": "7554"
},
{
"name": "Shell",
"bytes": "150309"
}
],
"symlink_target": ""
} |
"""
This linter checks for databases with no tables
"""
from indexdigest.utils import LinterEntry
def get_empty_databases(database):
"""
:type database: indexdigest.database.Database
:rtype: list[str]
"""
for db_name in database.query_list('SHOW DATABASES'):
# skip "core" MySQL databases
if db_name in ['information_schema']:
continue
tables_count = database.query_field('SELECT COUNT(*) FROM information_schema.TABLES '
'WHERE TABLE_SCHEMA = "{}" AND '
'TABLE_TYPE = "BASE TABLE"'.format(db_name))
# print(db_name, tables_count)
if tables_count == 0:
yield db_name
def check_empty_database(database):
"""
:type database: indexdigest.database.Database
:rtype: list[LinterEntry]
"""
for db_name in get_empty_databases(database):
yield LinterEntry(linter_type='empty_database', table_name=db_name,
message='"{}" database has no tables'.format(db_name))
| {
"content_hash": "84f84a2cb3bdfecff673aacf934a3d25",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 93,
"avg_line_length": 33.84375,
"alnum_prop": 0.5844875346260388,
"repo_name": "macbre/index-digest",
"id": "7b73df1edd5fe87efbbdf113a11161cf77647bd7",
"size": "1083",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "indexdigest/linters/linter_0164_empty_database.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "858"
},
{
"name": "Makefile",
"bytes": "1000"
},
{
"name": "Python",
"bytes": "154363"
},
{
"name": "Shell",
"bytes": "333"
}
],
"symlink_target": ""
} |
import xml.dom.minidom as minidom
import urllib2
import datetime
import logging
signof = lambda x: x > 0 and 1 or -1
def parse_date(date, hd=None, md=None):
hour = ''
minute = ''
colon = 0
tz = False
tokens = []
for token in date:
if colon >= 2 and token in ('-', '+'):
tz = True
colon = 0
if token == ':':
colon += 1
if tz:
if token == ':':
continue
if colon == 0:
hour += token
elif colon == 1:
minute += token
continue
else:
tokens.append(token)
txt = ''.join(tokens)
hour = int(hour)
minute = signof(hour) * int(minute)
dt = datetime.datetime.strptime(txt, '%Y-%m-%dT%H:%M:%S') - datetime.timedelta(hours=int(hour), minutes=int(minute))
return dt
def commits(username, project, branch='master', limit=5):
"""Fetch a list of GitHub commits."""
commits = []
web_url = 'http://github.com/%s/%s/' % (username, project)
try:
r = urllib2.urlopen(web_url + 'commits/%s.atom' % branch)
except IOError:
return commits
xml = r.read()
tree = minidom.parseString(xml)
entries = tree.getElementsByTagName('entry')
for entry in entries:
d = {}
d['project'] = project
d['weburl'] = web_url
d['url'] = entry.getElementsByTagName('link')[0].getAttribute('href')
d['title'] = entry.getElementsByTagName('title')[0].childNodes[0].data
date = entry.getElementsByTagName('updated')[0].childNodes[0].data
d['date'] = parse_date(date)
author = entry.getElementsByTagName('author')[0]
d['author'] = author.getElementsByTagName('name')[0].childNodes[0].data
commits.append(d)
return commits[:limit]
if __name__ == '__main__':
print commits('mkrautz', 'mumble-iphoneos')
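# Worked example for parse_date (illustrative): the trailing UTC offset is
# split off, parsed separately and subtracted, so
#   parse_date('2013-05-01T12:34:56-07:00')
# yields datetime.datetime(2013, 5, 1, 19, 34, 56), i.e. the timestamp
# normalised to UTC.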
| {
"content_hash": "de6a24addd33d25db5ceec4e45ee8dce",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 117,
"avg_line_length": 26.11111111111111,
"alnum_prop": 0.6443768996960486,
"repo_name": "mkrautz/mumble-iphoneos-betaweb",
"id": "8b58d4e3ed5f6cae5fd0b9c0a3f4873cb5767c90",
"size": "1664",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "github.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "57812"
}
],
"symlink_target": ""
} |
from .scg import SCG
from .optimization import *
| {
"content_hash": "456a6e89b5c2a9228b6c36d3e35112c5",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 27,
"avg_line_length": 24.5,
"alnum_prop": 0.7755102040816326,
"repo_name": "jameshensman/GPy",
"id": "909f897bc0d57d3ca9a50b626999a462933d5656",
"size": "49",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "GPy/inference/optimization/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "2030"
},
{
"name": "C++",
"bytes": "1605"
},
{
"name": "Python",
"bytes": "1683790"
},
{
"name": "Shell",
"bytes": "122"
}
],
"symlink_target": ""
} |
import copy
import pandas as pd
from threeML.plugins.SpectrumLike import SpectrumLike
from threeML.utils.OGIP.response import InstrumentResponse
from threeML.utils.spectrum.binned_spectrum import (
BinnedSpectrumWithDispersion,
ChannelSet,
)
__instrument_name = "General binned spectral data with energy dispersion"
class DispersionSpectrumLike(SpectrumLike):
def __init__(
self,
name,
observation,
background=None,
background_exposure=None,
verbose=True,
tstart=None,
tstop=None,
):
"""
A plugin for generic spectral data with energy dispersion, accepts an observed binned spectrum,
and a background binned spectrum or plugin with the background data.
In the case of a binned background spectrum, the background model is profiled
out and the appropriate profile-likelihood is used to fit the total spectrum. In this
case, caution must be used when there are zero background counts in bins as the
profiled background parameters (one per channel) will then have zero information from which to
constrain the background. It is recommended to bin the spectrum such that there is one background count
per channel.
If either a SpectrumLike or XYLike instance is provided as background, it is assumed that this is the
background data and the likelihood model from this plugin is used to simultaneously fit the background
and source.
:param name: the plugin name
:param observation: the observed spectrum
:param background: the background spectrum or a plugin from which the background will be modeled
:param background_exposure: (optional) adjust the background exposure when the modeled background data comes from an XYLike plugin
:param verbose: turn on/off verbose logging
"""
assert isinstance(
observation, BinnedSpectrumWithDispersion
), "observed spectrum is not an instance of BinnedSpectrumWithDispersion"
assert (
observation.response is not None
), "the observed spectrum does not have a response"
# assign the response to the plugins
self._rsp = observation.response # type: InstrumentResponse
super(DispersionSpectrumLike, self).__init__(
name=name,
observation=observation,
background=background,
background_exposure=background_exposure,
verbose=verbose,
tstart=tstart,
tstop=tstop,
)
def set_model(self, likelihoodModel):
"""
Set the model to be used in the joint minimization.
"""
# Store likelihood model
self._like_model = likelihoodModel
# We assume there are no extended sources, since we cannot handle them here
assert self._like_model.get_number_of_extended_sources() == 0, (
"OGIP-like plugins do not support " "extended sources"
)
# Get the differential flux function, and the integral function, with no dispersion,
# we simply integrate the model over the bins
differential_flux, integral = self._get_diff_flux_and_integral(self._like_model)
self._rsp.set_function(integral)
def _evaluate_model(self):
"""
evaluates the full model over all channels
:return:
"""
return self._rsp.convolve()
def get_simulated_dataset(self, new_name=None, **kwargs):
"""
Returns another DispersionSpectrumLike instance where data have been obtained by randomizing the current expectation from the
model, as well as from the background (depending on the respective noise models)
:return: a DispersionSpectrumLike simulated instance
"""
# pass the response thru to the constructor
return super(DispersionSpectrumLike, self).get_simulated_dataset(
new_name=new_name, **kwargs
)
def get_pha_files(self):
info = {}
# we want to pass copies so that
# the user doesn't grab the instance
# and try to modify things. protection
info["pha"] = copy.copy(self._observed_spectrum)
if self._background_spectrum is not None:
info["bak"] = copy.copy(self._background_spectrum)
info["rsp"] = copy.copy(self._rsp)
return info
def display_rsp(self):
"""
Display the currently loaded full response matrix, i.e., RMF and ARF convolved
:return:
"""
self._rsp.plot_matrix()
@property
def response(self):
return self._rsp
def _output(self):
# type: () -> pd.Series
super_out = super(DispersionSpectrumLike, self)._output() # type: pd.Series
the_df = pd.Series({"response": self._rsp.rsp_filename})
return super_out.append(the_df)
def write_pha(self, filename, overwrite=False, force_rsp_write=False):
"""
Writes the observation, background and (optional) rsp to PHAII fits files
:param filename: base file name to write out
:param overwrite: if you would like to force overwriting of the files
:param force_rsp_write: force the writing of an rsp even if not required
"""
# we need to pass up the variables to an OGIPLike
# so that we have the proper variable name
# a local import here because OGIPLike is dependent on this
from threeML.plugins.OGIPLike import OGIPLike
ogiplike = OGIPLike.from_general_dispersion_spectrum(self)
ogiplike.write_pha(
file_name=filename, overwrite=overwrite, force_rsp_write=force_rsp_write
)
@staticmethod
def _build_fake_observation(
fake_data, channel_set, source_errors, source_sys_errors, is_poisson, **kwargs
):
"""
This is the fake observation builder for DispersionSpectrumLike, which builds
data for a binned spectrum with energy dispersion, overriding the base SpectrumLike builder.
:param fake_data: series of values... they are ignored later
:param channel_set: a channel set
:param source_errors:
:param source_sys_errors:
:param is_poisson:
:return:
"""
assert (
"response" in kwargs
), "A response was not provided. Cannor build synthetic observation"
response = kwargs.pop("response")
observation = BinnedSpectrumWithDispersion(
fake_data,
exposure=1.0,
response=response,
count_errors=source_errors,
sys_errors=source_sys_errors,
quality=None,
scale_factor=1.0,
is_poisson=is_poisson,
mission="fake_mission",
instrument="fake_instrument",
tstart=0.0,
tstop=1.0,
)
return observation
@classmethod
def from_function(
cls,
name,
source_function,
response,
source_errors=None,
source_sys_errors=None,
background_function=None,
background_errors=None,
background_sys_errors=None,
):
"""
Construct a simulated spectrum from a given source function and (optional) background function. If source and/or background errors are not supplied, the likelihood is assumed to be Poisson.
:param name: simulated data set name
:param source_function: astromodels function
:param response: 3ML Instrument response
:param source_errors: (optional) gaussian source errors
:param source_sys_errors: (optional) systematic source errors
:param background_function: (optional) astromodels background function
:param background_errors: (optional) gaussian background errors
:param background_sys_errors: (optional) background systematic errors
:return: simulated DispersionSpectrumLike plugin
"""
channel_set = ChannelSet.from_instrument_response(response)
energy_min, energy_max = channel_set.bin_stack.T
# pass the variables to the super class
return super(DispersionSpectrumLike, cls).from_function(
name,
source_function,
energy_min,
energy_max,
source_errors,
source_sys_errors,
background_function,
background_errors,
background_sys_errors,
response=response,
)
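# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative, not part of the module). It assumes an
# astromodels spectral shape and an InstrumentResponse `rsp` loaded elsewhere;
# the names below are placeholders.
#
#   from astromodels import Powerlaw
#   sim = DispersionSpectrumLike.from_function("sim_det",
#                                              source_function=Powerlaw(),
#                                              response=rsp,
#                                              background_function=Powerlaw())
#   sim.write_pha("sim_det", overwrite=True)
# ---------------------------------------------------------------------------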
| {
"content_hash": "dcacc58fcf06c1776af944c51c6a11d7",
"timestamp": "",
"source": "github",
"line_count": 254,
"max_line_length": 197,
"avg_line_length": 33.84251968503937,
"alnum_prop": 0.6386691484411354,
"repo_name": "giacomov/3ML",
"id": "035d57e076aa1b1e46d453b0364cc1943d4393a7",
"size": "8596",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "threeML/plugins/DispersionSpectrumLike.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "474"
},
{
"name": "Dockerfile",
"bytes": "477"
},
{
"name": "Python",
"bytes": "1305668"
},
{
"name": "Shell",
"bytes": "17627"
}
],
"symlink_target": ""
} |
import itertools
from .. import bytecode, error as error_types, opnames as ops
from .descriptors import parseFieldDescriptor, parseMethodDescriptor, parseUnboundMethodDescriptor
from .verifier_types import OBJECT_INFO, T_ADDRESS, T_ARRAY, T_DOUBLE, T_FLOAT, T_INT, T_INT_CONST, T_INVALID, T_LONG, T_NULL, T_OBJECT, T_UNINIT_OBJECT, T_UNINIT_THIS, decrementDim, exactArrayFrom, fullinfo_t, mergeTypes
class VerifierTypesState(object):
def __init__(self, stack, locals, masks):
self.stack = stack
self.locals = locals
self.masks = masks
def copy(self): return VerifierTypesState(self.stack, self.locals, self.masks)
def withExcept(self, t): return VerifierTypesState([t], self.locals, self.masks)
def pop(self, n):
if n == 0:
return []
self.stack, popped = self.stack[:-n], self.stack[-n:]
return popped
def push(self, vals):
self.stack = self.stack + list(vals)
def setLocal(self, i, v):
if len(self.locals) < i:
self.locals = self.locals + [T_INVALID]*(i - len(self.locals))
self.locals = self.locals[:i] + [v] + self.locals[i+1:]
new = frozenset([i])
self.masks = [(addr, old | new) for addr, old in self.masks]
def local(self, i):
if len(self.locals) <= i:
return T_INVALID
return self.locals[i]
def jsr(self, target):
self.masks = self.masks + [(target, frozenset())]
def replace(self, old, new):
self.stack = [(new if v == old else v) for v in self.stack]
mask = frozenset(i for i, v in enumerate(self.locals) if v == old)
self.locals = [(new if v == old else v) for v in self.locals]
self.masks = [(addr, oldmask | mask) for addr, oldmask in self.masks]
def invalidateNews(self):
# Doesn't need to update mask
self.stack = [(T_INVALID if v.tag == '.new' else v) for v in self.stack]
self.locals = [(T_INVALID if v.tag == '.new' else v) for v in self.locals]
def maskFor(self, called):
self.masks = self.masks[:]
target, mask = self.masks.pop()
while target != called:
target, mask = self.masks.pop()
return mask
def returnTo(self, called, jsrstate):
mask = self.maskFor(called)
# merge locals using mask
zipped = itertools.izip_longest(self.locals, jsrstate.locals, fillvalue=T_INVALID)
self.locals = [(x if i in mask else y) for i,(x,y) in enumerate(zipped)]
def merge(self, other, env):
old_triple = self.stack, self.locals, self.masks
assert len(self.stack) == len(other.stack)
self.stack = [mergeTypes(env, new, old) for old, new in zip(self.stack, other.stack)]
self.locals = [mergeTypes(env, new, old) for old, new in zip(self.locals, other.locals)]
while self.locals and self.locals[-1] == T_INVALID:
self.locals.pop()
# Merge Masks
last_match = -1
mergedmasks = []
for entry1, mask1 in self.masks:
for j, (entry2, mask2) in enumerate(other.masks):
if j > last_match and entry1 == entry2:
item = entry1, (mask1 | mask2)
mergedmasks.append(item)
last_match = j
self.masks = mergedmasks
return (self.stack, self.locals, self.masks) != old_triple
def stateFromInitialArgs(args): return VerifierTypesState([], args[:], [])
_invoke_ops = (ops.INVOKESPECIAL, ops.INVOKESTATIC, ops.INVOKEVIRTUAL, ops.INVOKEINTERFACE, ops.INVOKEINIT, ops.INVOKEDYNAMIC)
def _loadFieldDesc(cpool, ind):
target, name, desc = cpool.getArgsCheck('Field', ind)
return parseFieldDescriptor(desc)
def _loadMethodDesc(cpool, ind):
target, name, desc = cpool.getArgs(ind)
return parseMethodDescriptor(desc)
def _indexToCFMInfo(cpool, ind, typen):
actual = cpool.getType(ind)
# JVM_GetCPMethodClassNameUTF accepts both
assert actual == typen or actual == 'InterfaceMethod' and typen == 'Method'
cname = cpool.getArgs(ind)[0]
if cname.startswith('[') or cname.endswith(';'):
try:
return parseFieldDescriptor(cname)[0]
except ValueError as e:
return T_INVALID
else:
return T_OBJECT(cname)
# Instructions which pop a fixed amount
_popAmount = {
ops.ARRLOAD_OBJ: 2,
ops.ARRSTORE_OBJ: 3,
ops.ARRLOAD: 2,
ops.TRUNCATE: 1,
ops.LCMP: 4,
ops.IF_A: 1,
ops.IF_I: 1,
ops.IF_ACMP: 2,
ops.IF_ICMP: 2,
ops.SWITCH: 1,
ops.NEWARRAY: 1,
ops.ANEWARRAY: 1,
ops.ARRLEN: 1,
ops.THROW: 1,
ops.CHECKCAST: 1,
ops.INSTANCEOF: 1,
ops.MONENTER: 1,
ops.MONEXIT: 1,
ops.GETFIELD: 1,
ops.NOP: 0,
ops.CONSTNULL: 0,
ops.CONST: 0,
ops.LDC: 0,
ops.LOAD: 0,
ops.IINC: 0,
ops.GOTO: 0,
ops.JSR: 0,
ops.RET: 0,
ops.NEW: 0,
ops.GETSTATIC: 0,
}
# Instructions which pop a variable amount depending on whether type is category 2
_popAmountVar = {
ops.STORE: (1, 0),
ops.NEG: (1, 0),
ops.CONVERT: (1, 0),
ops.ADD: (2, 0),
ops.SUB: (2, 0),
ops.MUL: (2, 0),
ops.DIV: (2, 0),
ops.REM: (2, 0),
ops.XOR: (2, 0),
ops.OR: (2, 0),
ops.AND: (2, 0),
ops.FCMP: (2, 0),
ops.SHL: (1, 1),
ops.SHR: (1, 1),
ops.USHR: (1, 1),
ops.ARRSTORE: (1, 2),
}
# Generic stack codes
genericStackCodes = {
ops.POP: (1, []),
ops.POP2: (2, []),
ops.DUP: (1, [0, 0]),
ops.DUPX1: (2, [1, 0, 1]),
ops.DUPX2: (3, [2, 0, 1, 2]),
ops.DUP2: (2, [0, 1, 0, 1]),
ops.DUP2X1: (3, [1, 2, 0, 1, 2]),
ops.DUP2X2: (4, [2, 3, 0, 1, 2, 3]),
ops.SWAP: (2, [1, 0]),
}
def _getPopAmount(cpool, instr, method):
op = instr[0]
if op in _popAmount:
return _popAmount[op]
if op in _popAmountVar:
a, b = _popAmountVar[op]
cat = 2 if instr[1] in 'JD' else 1
return a * cat + b
if op in genericStackCodes:
return genericStackCodes[op][0]
if op == ops.MULTINEWARRAY:
return instr[2]
elif op == ops.RETURN:
return len(parseMethodDescriptor(method.descriptor)[1])
elif op in (ops.PUTFIELD, ops.PUTSTATIC):
args = len(_loadFieldDesc(cpool, instr[1]))
if op == ops.PUTFIELD:
args += 1
return args
elif op in _invoke_ops:
args = len(_loadMethodDesc(cpool, instr[1])[0])
if op != ops.INVOKESTATIC and op != ops.INVOKEDYNAMIC:
args += 1
return args
codes = dict(zip('IFJD', [T_INT, T_FLOAT, T_LONG, T_DOUBLE]))
def _getStackResult(cpool, instr, key):
op = instr[0]
if op in (ops.TRUNCATE, ops.LCMP, ops.FCMP, ops.ARRLEN, ops.INSTANCEOF):
return T_INT
elif op in (ops.ADD, ops.SUB, ops.MUL, ops.DIV, ops.REM, ops.XOR, ops.AND, ops.OR, ops.SHL, ops.SHR, ops.USHR, ops.NEG):
return codes[instr[1]]
elif op == ops.CONSTNULL:
return T_NULL
elif op == ops.CONST:
if instr[1] == 'I':
return T_INT_CONST(instr[2])
return codes[instr[1]]
elif op == ops.ARRLOAD:
return codes.get(instr[1], T_INT)
elif op == ops.CONVERT:
return codes[instr[2]]
elif op == ops.LDC:
return {
'Int': T_INT,
'Long': T_LONG,
'Float': T_FLOAT,
'Double': T_DOUBLE,
'String': T_OBJECT('java/lang/String'),
'Class': T_OBJECT('java/lang/Class'),
'MethodType': T_OBJECT('java/lang/invoke/MethodType'),
'MethodHandle': T_OBJECT('java/lang/invoke/MethodHandle'),
}[cpool.getType(instr[1])]
elif op == ops.JSR:
return T_ADDRESS(instr[1])
elif op in (ops.CHECKCAST, ops.NEW, ops.ANEWARRAY, ops.MULTINEWARRAY):
target = _indexToCFMInfo(cpool, instr[1], 'Class')
if op == ops.ANEWARRAY:
return T_ARRAY(target)
elif op == ops.NEW:
return T_UNINIT_OBJECT(key)
return target
elif op == ops.NEWARRAY:
return parseFieldDescriptor('[' + instr[1])[0]
elif op in (ops.GETFIELD, ops.GETSTATIC):
return _loadFieldDesc(cpool, instr[1])[0]
elif op in _invoke_ops:
out = _loadMethodDesc(cpool, instr[1])[1]
assert 0 <= len(out) <= 2
return out[0] if out else None
class InstructionNode(object):
__slots__ = "key code env class_ cpool instruction op visited changed offsetToIndex indexToOffset state out_state jsrTarget next_instruction returnedFrom successors pop_amount stack_push stack_code target_type isThisCtor".split()
def __init__(self, code, offsetToIndex, indexToOffset, key):
self.key = key
assert self.key is not None  # a None key would cause problems with origin tracking
self.code = code
self.env = code.class_.env
self.class_ = code.class_
self.cpool = self.class_.cpool
self.instruction = code.bytecode[key]
self.op = self.instruction[0]
self.visited, self.changed = False, False
# store for usage calculating JSRs, finding successor instructions and the like
self.offsetToIndex = offsetToIndex
self.indexToOffset = indexToOffset
self.state = None
# Fields to be assigned later
self.jsrTarget = None
self.next_instruction = None
self.returnedFrom = None
self.successors = None
self.pop_amount = -1
self.stack_push = []
self.stack_code = None
# for blockmaker
self.target_type = None
self.isThisCtor = False
self.out_state = None # store out state for JSR/RET instructions
self._precomputeValues()
def _removeInterface(self, vt):
if vt.tag == '.obj' and vt.extra is not None and self.env.isInterface(vt.extra, forceCheck=True):
return T_ARRAY(OBJECT_INFO, vt.dim)
return vt
def _precomputeValues(self):
# parsed_desc, successors
off_i = self.offsetToIndex[self.key]
self.next_instruction = self.indexToOffset[off_i+1] # None if end of code
op = self.instruction[0]
self.pop_amount = _getPopAmount(self.cpool, self.instruction, self.code.method)
# cache these, since they're not state dependent
result = _getStackResult(self.cpool, self.instruction, self.key)
# temporary hack
if op == ops.CHECKCAST:
result = self._removeInterface(result)
if result is not None:
self.stack_push = [result]
if result in (T_LONG, T_DOUBLE):
self.stack_push.append(T_INVALID)
if op in genericStackCodes:
self.stack_code = genericStackCodes[op][1]
if op == ops.NEW:
self.target_type = _indexToCFMInfo(self.cpool, self.instruction[1], 'Class')
# Now get successors
next_ = self.next_instruction
if op in (ops.IF_A, ops.IF_I, ops.IF_ICMP, ops.IF_ACMP):
self.successors = next_, self.instruction[2]
elif op in (ops.JSR, ops.GOTO):
self.successors = self.instruction[1],
elif op in (ops.RETURN, ops.THROW):
self.successors = ()
elif op == ops.RET:
self.successors = None # calculate it when the node is reached
elif op == ops.SWITCH:
opname, default, jumps = self.instruction
targets = (default,)
if jumps:
targets += zip(*jumps)[1]
self.successors = targets
else:
self.successors = next_,
def _getNewState(self, iNodes):
state = self.state.copy()
popped = state.pop(self.pop_amount)
# Local updates/reading
op = self.instruction[0]
if op == ops.LOAD:
state.push([state.local(self.instruction[2])])
if self.instruction[1] in 'JD':
state.push([T_INVALID])
elif op == ops.STORE:
for i, val in enumerate(popped):
state.setLocal(self.instruction[2] + i, val)
elif op == ops.IINC:
state.setLocal(self.instruction[1], T_INT) # Make sure to clobber constants
elif op == ops.JSR:
state.jsr(self.instruction[1])
elif op == ops.NEW:
# This should never happen, but better safe than sorry.
state.replace(self.stack_push[0], T_INVALID)
elif op == ops.INVOKEINIT:
old = popped[0]
if old.tag == '.new':
new = _indexToCFMInfo(self.cpool, self.instruction[1], 'Method')
else: # .init
new = T_OBJECT(self.class_.name)
self.isThisCtor = True
state.replace(old, new)
# Make sure that push happens after local replacement in case of new/invokeinit
if self.stack_code is not None:
state.push(popped[i] for i in self.stack_code)
elif op == ops.ARRLOAD_OBJ:
# temporary hack
result = self._removeInterface(decrementDim(popped[0]))
state.push([result])
elif op == ops.NEWARRAY or op == ops.ANEWARRAY:
arrt = self.stack_push[0]
size = popped[0].const
if size is not None:
arrt = exactArrayFrom(arrt, size)
state.push([arrt])
else:
state.push(self.stack_push)
if self.op in (ops.RET, ops.JSR):
state.invalidateNews()
self.out_state = state # store for later convienence
assert all(isinstance(vt, fullinfo_t) for vt in state.stack)
assert all(isinstance(vt, fullinfo_t) for vt in state.locals)
return state
def _mergeSingleSuccessor(self, other, newstate, iNodes, isException):
if self.op == ops.RET and not isException:
# Get the instruction before other
off_i = self.offsetToIndex[other.key]
jsrnode = iNodes[self.indexToOffset[off_i-1]]
jsrnode.returnedFrom = self.key
if jsrnode.visited: # if not, skip for later
newstate = newstate.copy()
newstate.returnTo(jsrnode.instruction[1], jsrnode.state)
else:
return
if not other.visited:
other.state = newstate.copy()
other.visited = other.changed = True
else:
changed = other.state.merge(newstate, self.env)
other.changed = other.changed or changed
def update(self, iNodes, exceptions):
assert self.visited
self.changed = False
newstate = self._getNewState(iNodes)
successors = self.successors
if self.op == ops.JSR and self.returnedFrom is not None:
iNodes[self.returnedFrom].changed = True
if successors is None:
assert self.op == ops.RET
called = self.state.local(self.instruction[1]).extra
temp = [n.next_instruction for n in iNodes.values() if (n.op == ops.JSR and n.instruction[1] == called)]
successors = self.successors = tuple(temp)
self.jsrTarget = called # store for later use in ssa creation
# Merge into exception handlers first
for (start, end, handler, except_info) in exceptions:
if start <= self.key < end:
self._mergeSingleSuccessor(handler, self.state.withExcept(except_info), iNodes, True)
if self.op == ops.INVOKEINIT: # two cases since the ctor may suceed or fail before throwing
self._mergeSingleSuccessor(handler, newstate.withExcept(except_info), iNodes, True)
# Now regular successors
for k in self.successors:
self._mergeSingleSuccessor(iNodes[k], newstate, iNodes, False)
def __str__(self): # pragma: no cover
lines = ['{}: {}'.format(self.key, bytecode.printInstruction(self.instruction))]
if self.visited:
lines.append('Stack: ' + ', '.join(map(str, self.state.stack)))
lines.append('Locals: ' + ', '.join(map(str, self.state.locals)))
if self.state.masks:
lines.append('Masks:')
lines += ['\t{}: {}'.format(entry, sorted(cset)) for entry, cset in self.state.masks]
else:
lines.append('\tunvisited')
return '\n'.join(lines) + '\n'
def verifyBytecode(code):
method, class_ = code.method, code.class_
args, rval = parseUnboundMethodDescriptor(method.descriptor, class_.name, method.static)
env = class_.env
# Object has no superclass to construct, so it doesn't get an uninit this
if method.isConstructor and class_.name != 'java/lang/Object':
assert args[0] == T_OBJECT(class_.name)
args[0] = T_UNINIT_THIS
assert len(args) <= 255 and len(args) <= code.locals
offsets = sorted(code.bytecode.keys())
offset_rmap = {v:i for i,v in enumerate(offsets)}
offsets.append(None) # Sentinel for end of code
iNodes = [InstructionNode(code, offset_rmap, offsets, key) for key in offsets[:-1]]
iNodeLookup = {n.key:n for n in iNodes}
keys = frozenset(iNodeLookup)
for raw in code.except_raw:
if not ((0 <= raw.start < raw.end) and (raw.start in keys) and
(raw.handler in keys) and (raw.end in keys or raw.end == code.codelen)):
keylist = sorted(keys) + [code.codelen]
msg = "Illegal exception handler: {}\nValid offsets are: {}".format(raw, ', '.join(map(str, keylist)))
raise error_types.VerificationError(msg)
def makeException(rawdata):
if rawdata.type_ind:
typen = class_.cpool.getArgsCheck('Class', rawdata.type_ind)
else:
typen = 'java/lang/Throwable'
return (rawdata.start, rawdata.end, iNodeLookup[rawdata.handler], T_OBJECT(typen))
exceptions = map(makeException, code.except_raw)
start = iNodes[0]
start.state = stateFromInitialArgs(args)
start.visited, start.changed = True, True
done = False
while not done:
done = True
for node in iNodes:
if node.changed:
node.update(iNodeLookup, exceptions)
done = False
return iNodes
| {
"content_hash": "7512fa2febb9d89f91bfd528f2a509f3",
"timestamp": "",
"source": "github",
"line_count": 508,
"max_line_length": 233,
"avg_line_length": 35.860236220472444,
"alnum_prop": 0.5895592029423067,
"repo_name": "orneryhippo/saturdays",
"id": "9ab8cb3f4d504a7d5a8dd6dc65f1ad870b70d26d",
"size": "18217",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Krakatau-master/Krakatau/Krakatau/verifier/inference_verifier.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Clojure",
"bytes": "2404"
},
{
"name": "Jasmin",
"bytes": "40997"
},
{
"name": "Java",
"bytes": "40405"
},
{
"name": "Objective-J",
"bytes": "10978"
},
{
"name": "Python",
"bytes": "572639"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import logging
import warnings
from admin_scripts.tests import AdminScriptTestCase
from django.core import mail
from django.core.files.temp import NamedTemporaryFile
from django.test import RequestFactory, TestCase, override_settings
from django.test.utils import LoggingCaptureMixin, patch_logger
from django.utils.deprecation import RemovedInNextVersionWarning
from django.utils.encoding import force_text
from django.utils.log import (
AdminEmailHandler, CallbackFilter, RequireDebugFalse, RequireDebugTrue,
)
from django.utils.six import StringIO
from .logconfig import MyEmailBackend
# logging config prior to using filter with mail_admins
OLD_LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
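# --- Illustrative sketch (not part of the original test module) ---
# For contrast with OLD_LOGGING above: a hedged example of the same config
# once 'mail_admins' is gated by the RequireDebugFalse filter, so error
# mails go out only when DEBUG is False. This dict is illustrative and is
# not used by the tests below.
NEW_LOGGING_EXAMPLE = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse',
        },
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler',
        },
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    },
}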
class LoggingFiltersTest(TestCase):
def test_require_debug_false_filter(self):
"""
Test the RequireDebugFalse filter class.
"""
filter_ = RequireDebugFalse()
with self.settings(DEBUG=True):
self.assertEqual(filter_.filter("record is not used"), False)
with self.settings(DEBUG=False):
self.assertEqual(filter_.filter("record is not used"), True)
def test_require_debug_true_filter(self):
"""
Test the RequireDebugTrue filter class.
"""
filter_ = RequireDebugTrue()
with self.settings(DEBUG=True):
self.assertEqual(filter_.filter("record is not used"), True)
with self.settings(DEBUG=False):
self.assertEqual(filter_.filter("record is not used"), False)
class DefaultLoggingTest(LoggingCaptureMixin, TestCase):
def test_django_logger(self):
"""
The 'django' base logger only outputs anything when DEBUG=True.
"""
self.logger.error("Hey, this is an error.")
self.assertEqual(self.logger_output.getvalue(), '')
with self.settings(DEBUG=True):
self.logger.error("Hey, this is an error.")
self.assertEqual(self.logger_output.getvalue(), 'Hey, this is an error.\n')
class WarningLoggerTests(TestCase):
"""
Tests that warning output for RemovedInDjangoXXWarning (XX being the next
Django version) is enabled and captured by the logging system.
"""
def setUp(self):
# If tests are invoked with "-Wall" (or any -W flag actually) then
# warning logging gets disabled (see configure_logging in django/utils/log.py).
# However, these tests expect warnings to be logged, so manually force warnings
# to the logs. Use getattr() here because the logging capture state is
# undocumented and (I assume) brittle.
self._old_capture_state = bool(getattr(logging, '_warnings_showwarning', False))
logging.captureWarnings(True)
# this convoluted setup is to avoid printing this deprecation to
# stderr during test running - as the test runner forces deprecations
# to be displayed at the global py.warnings level
self.logger = logging.getLogger('py.warnings')
self.outputs = []
self.old_streams = []
for handler in self.logger.handlers:
self.old_streams.append(handler.stream)
self.outputs.append(StringIO())
handler.stream = self.outputs[-1]
def tearDown(self):
for i, handler in enumerate(self.logger.handlers):
self.logger.handlers[i].stream = self.old_streams[i]
# Reset warnings state.
logging.captureWarnings(self._old_capture_state)
@override_settings(DEBUG=True)
def test_warnings_capture(self):
with warnings.catch_warnings():
warnings.filterwarnings('always')
warnings.warn('Foo Deprecated', RemovedInNextVersionWarning)
output = force_text(self.outputs[0].getvalue())
self.assertIn('Foo Deprecated', output)
def test_warnings_capture_debug_false(self):
with warnings.catch_warnings():
warnings.filterwarnings('always')
warnings.warn('Foo Deprecated', RemovedInNextVersionWarning)
output = force_text(self.outputs[0].getvalue())
self.assertNotIn('Foo Deprecated', output)
@override_settings(DEBUG=True)
def test_error_filter_still_raises(self):
with warnings.catch_warnings():
warnings.filterwarnings(
'error',
category=RemovedInNextVersionWarning
)
with self.assertRaises(RemovedInNextVersionWarning):
warnings.warn('Foo Deprecated', RemovedInNextVersionWarning)
class CallbackFilterTest(TestCase):
def test_sense(self):
f_false = CallbackFilter(lambda r: False)
f_true = CallbackFilter(lambda r: True)
self.assertEqual(f_false.filter("record"), False)
self.assertEqual(f_true.filter("record"), True)
def test_passes_on_record(self):
collector = []
def _callback(record):
collector.append(record)
return True
f = CallbackFilter(_callback)
f.filter("a record")
self.assertEqual(collector, ["a record"])
class AdminEmailHandlerTest(TestCase):
logger = logging.getLogger('django')
def get_admin_email_handler(self, logger):
# Ensure that AdminEmailHandler does not get filtered out
# even with DEBUG=True.
admin_email_handler = [
h for h in logger.handlers
if h.__class__.__name__ == "AdminEmailHandler"
][0]
return admin_email_handler
def test_fail_silently(self):
admin_email_handler = self.get_admin_email_handler(self.logger)
self.assertTrue(admin_email_handler.connection().fail_silently)
@override_settings(
ADMINS=[('whatever admin', 'admin@example.com')],
EMAIL_SUBJECT_PREFIX='-SuperAwesomeSubject-'
)
def test_accepts_args(self):
"""
Ensure that user-supplied arguments and the EMAIL_SUBJECT_PREFIX
setting are used to compose the email subject.
Refs #16736.
"""
message = "Custom message that says '%s' and '%s'"
token1 = 'ping'
token2 = 'pong'
admin_email_handler = self.get_admin_email_handler(self.logger)
# Backup then override original filters
orig_filters = admin_email_handler.filters
try:
admin_email_handler.filters = []
self.logger.error(message, token1, token2)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['admin@example.com'])
self.assertEqual(mail.outbox[0].subject,
"-SuperAwesomeSubject-ERROR: Custom message that says 'ping' and 'pong'")
finally:
# Restore original filters
admin_email_handler.filters = orig_filters
@override_settings(
ADMINS=[('whatever admin', 'admin@example.com')],
EMAIL_SUBJECT_PREFIX='-SuperAwesomeSubject-',
INTERNAL_IPS=['127.0.0.1'],
)
def test_accepts_args_and_request(self):
"""
Ensure that the subject is also handled correctly when a request
object is passed.
"""
message = "Custom message that says '%s' and '%s'"
token1 = 'ping'
token2 = 'pong'
admin_email_handler = self.get_admin_email_handler(self.logger)
# Backup then override original filters
orig_filters = admin_email_handler.filters
try:
admin_email_handler.filters = []
rf = RequestFactory()
request = rf.get('/')
self.logger.error(message, token1, token2,
extra={
'status_code': 403,
'request': request,
}
)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['admin@example.com'])
self.assertEqual(mail.outbox[0].subject,
"-SuperAwesomeSubject-ERROR (internal IP): Custom message that says 'ping' and 'pong'")
finally:
# Restore original filters
admin_email_handler.filters = orig_filters
@override_settings(
ADMINS=[('admin', 'admin@example.com')],
EMAIL_SUBJECT_PREFIX='',
DEBUG=False,
)
def test_subject_accepts_newlines(self):
"""
Ensure that newlines in email reports' subjects are escaped to keep
AdminEmailHandler from failing.
Refs #17281.
"""
message = 'Message \r\n with newlines'
expected_subject = 'ERROR: Message \\r\\n with newlines'
self.assertEqual(len(mail.outbox), 0)
self.logger.error(message)
self.assertEqual(len(mail.outbox), 1)
self.assertNotIn('\n', mail.outbox[0].subject)
self.assertNotIn('\r', mail.outbox[0].subject)
self.assertEqual(mail.outbox[0].subject, expected_subject)
@override_settings(
ADMINS=(('admin', 'admin@example.com'),),
EMAIL_SUBJECT_PREFIX='',
DEBUG=False,
)
def test_truncate_subject(self):
"""
RFC 2822's hard limit is 998 characters per line.
So, minus "Subject: ", the actual subject must be no longer than 989
characters.
Refs #17281.
"""
message = 'a' * 1000
expected_subject = 'ERROR: aa' + 'a' * 980
self.assertEqual(len(mail.outbox), 0)
self.logger.error(message)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, expected_subject)
@override_settings(
ADMINS=[('admin', 'admin@example.com')],
DEBUG=False,
)
def test_uses_custom_email_backend(self):
"""
Refs #19325
"""
message = 'All work and no play makes Jack a dull boy'
admin_email_handler = self.get_admin_email_handler(self.logger)
mail_admins_called = {'called': False}
def my_mail_admins(*args, **kwargs):
connection = kwargs['connection']
self.assertIsInstance(connection, MyEmailBackend)
mail_admins_called['called'] = True
# Monkeypatches
orig_mail_admins = mail.mail_admins
orig_email_backend = admin_email_handler.email_backend
mail.mail_admins = my_mail_admins
admin_email_handler.email_backend = (
'logging_tests.logconfig.MyEmailBackend')
try:
self.logger.error(message)
self.assertTrue(mail_admins_called['called'])
finally:
# Revert Monkeypatches
mail.mail_admins = orig_mail_admins
admin_email_handler.email_backend = orig_email_backend
@override_settings(
ADMINS=[('whatever admin', 'admin@example.com')],
)
def test_emit_non_ascii(self):
"""
#23593 - AdminEmailHandler should allow Unicode characters in the
request.
"""
handler = self.get_admin_email_handler(self.logger)
record = self.logger.makeRecord('name', logging.ERROR, 'function', 'lno', 'message', None, None)
rf = RequestFactory()
url_path = '/º'
record.request = rf.get(url_path)
handler.emit(record)
self.assertEqual(len(mail.outbox), 1)
msg = mail.outbox[0]
self.assertEqual(msg.to, ['admin@example.com'])
self.assertEqual(msg.subject, "[Django] ERROR (EXTERNAL IP): message")
self.assertIn("path:%s" % url_path, msg.body)
@override_settings(
MANAGERS=[('manager', 'manager@example.com')],
DEBUG=False,
)
def test_customize_send_mail_method(self):
class ManagerEmailHandler(AdminEmailHandler):
def send_mail(self, subject, message, *args, **kwargs):
mail.mail_managers(subject, message, *args, connection=self.connection(), **kwargs)
handler = ManagerEmailHandler()
record = self.logger.makeRecord('name', logging.ERROR, 'function', 'lno', 'message', None, None)
self.assertEqual(len(mail.outbox), 0)
handler.emit(record)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['manager@example.com'])
class SettingsConfigTest(AdminScriptTestCase):
"""
Test that accessing settings in a custom logging handler does not trigger
a circular import error.
"""
def setUp(self):
log_config = """{
'version': 1,
'handlers': {
'custom_handler': {
'level': 'INFO',
'class': 'logging_tests.logconfig.MyHandler',
}
}
}"""
self.write_settings('settings.py', sdict={'LOGGING': log_config})
def tearDown(self):
self.remove_settings('settings.py')
def test_circular_dependency(self):
# 'check' is just an example command used to trigger settings configuration
out, err = self.run_manage(['check'])
self.assertNoOutput(err)
self.assertOutput(out, "System check identified no issues (0 silenced).")
def dictConfig(config):
dictConfig.called = True
dictConfig.called = False
class SetupConfigureLogging(TestCase):
"""
Test that calling django.setup() initializes the logging configuration.
"""
@override_settings(LOGGING_CONFIG='logging_tests.tests.dictConfig',
LOGGING=OLD_LOGGING)
def test_configure_initializes_logging(self):
from django import setup
setup()
self.assertTrue(dictConfig.called)
@override_settings(DEBUG=True, ROOT_URLCONF='logging_tests.urls')
class SecurityLoggerTest(TestCase):
def test_suspicious_operation_creates_log_message(self):
with patch_logger('django.security.SuspiciousOperation', 'error') as calls:
self.client.get('/suspicious/')
self.assertEqual(len(calls), 1)
self.assertEqual(calls[0], 'dubious')
def test_suspicious_operation_uses_sublogger(self):
with patch_logger('django.security.DisallowedHost', 'error') as calls:
self.client.get('/suspicious_spec/')
self.assertEqual(len(calls), 1)
self.assertEqual(calls[0], 'dubious')
@override_settings(
ADMINS=[('admin', 'admin@example.com')],
DEBUG=False,
)
def test_suspicious_email_admins(self):
self.client.get('/suspicious/')
self.assertEqual(len(mail.outbox), 1)
self.assertIn('path:/suspicious/,', mail.outbox[0].body)
class SettingsCustomLoggingTest(AdminScriptTestCase):
"""
Test that logging defaults are still applied when using a custom
callable in LOGGING_CONFIG (e.g., logging.config.fileConfig).
"""
def setUp(self):
logging_conf = """
[loggers]
keys=root
[handlers]
keys=stream
[formatters]
keys=simple
[logger_root]
handlers=stream
[handler_stream]
class=StreamHandler
formatter=simple
args=(sys.stdout,)
[formatter_simple]
format=%(message)s
"""
self.temp_file = NamedTemporaryFile()
self.temp_file.write(logging_conf.encode('utf-8'))
self.temp_file.flush()
sdict = {'LOGGING_CONFIG': '"logging.config.fileConfig"',
'LOGGING': 'r"%s"' % self.temp_file.name}
self.write_settings('settings.py', sdict=sdict)
def tearDown(self):
self.temp_file.close()
self.remove_settings('settings.py')
def test_custom_logging(self):
out, err = self.run_manage(['check'])
self.assertNoOutput(err)
self.assertOutput(out, "System check identified no issues (0 silenced).")
| {
"content_hash": "00742dc0455fd749927f3f29282917b5",
"timestamp": "",
"source": "github",
"line_count": 460,
"max_line_length": 116,
"avg_line_length": 34.53913043478261,
"alnum_prop": 0.6226711983887211,
"repo_name": "hackerbot/DjangoDev",
"id": "5ee1092c41baa1248ec15deca3f4a84ab13263f3",
"size": "15912",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/logging_tests/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "43080"
},
{
"name": "HTML",
"bytes": "171684"
},
{
"name": "JavaScript",
"bytes": "105066"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "10834007"
},
{
"name": "Shell",
"bytes": "3056"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
} |
import random
def main(j, args, params, *other_args):
page = params.result = args.page
try:
macro_params = dict([p.strip() for p in param_pair.split('=')] for param_pair in args.cmdstr.split('|'))
except:
macro_params = {}
id = 'toc_' + str(random.randint(0, 9999))
page.addJS(jsLink='/jslib/old/tableofcontents/jquery.tableofcontents.min.js')
page.addJS(jsContent='''
$(document).ready(function(){{
$("#{0}").tableOfContents(
$("#{0}").parent(),
{{
startLevel: {1},
depth: {2}
}}
);
}});'''.format(id, macro_params.get('start', 1), macro_params.get('depth', 6)))
page.addMessage('<ul id="{0}"></ul>'.format(id))
return params
def match(j, args, params, tags, tasklet):
return True
| {
"content_hash": "5ca4ca4eaee05e8c2bb4de49be0f663d",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 112,
"avg_line_length": 35.86206896551724,
"alnum_prop": 0.43365384615384617,
"repo_name": "Jumpscale/jumpscale6_core",
"id": "bd5d67e6cb30077218adcf94352e5f042a7b1c7e",
"size": "1040",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/portalbase/macros/page/toc/1_toc.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "3681"
},
{
"name": "HTML",
"bytes": "11738"
},
{
"name": "JavaScript",
"bytes": "70132"
},
{
"name": "Lua",
"bytes": "2162"
},
{
"name": "Python",
"bytes": "5848017"
},
{
"name": "Shell",
"bytes": "7692"
}
],
"symlink_target": ""
} |
import pickle
import socket
import threading
# We'll pickle a list of numbers:
someList = [ 1, 2, 7, 9, 0 ]
pickledList = pickle.dumps ( someList )
# Our thread class:
class ClientThread ( threading.Thread ):
# Override Thread's __init__ method to accept the parameters needed:
def __init__ ( self, channel, details ):
self.channel = channel
self.details = details
threading.Thread.__init__ ( self )
def run ( self ):
print 'Received connection:', self.details [ 0 ]
self.channel.send ( pickledList )
for x in xrange ( 10 ):
print self.channel.recv ( 1024 )
self.channel.close()
print 'Closed connection:', self.details [ 0 ]
# Set up the server:
server = socket.socket ( socket.AF_INET, socket.SOCK_STREAM )
server.bind ( ( '', 2727 ) )
server.listen ( 5 )
# Have the server serve "forever":
while True:
channel, details = server.accept()
ClientThread ( channel, details ).start()
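# --- Illustrative sketch (not part of the original file) ---
# A hedged client counterpart to the server above: it connects, unpickles
# the list, then sends the ten messages each ClientThread expects to read.
# The host name and message text are assumptions chosen for illustration.
def example_client():
    import pickle
    import socket
    client = socket.socket ( socket.AF_INET, socket.SOCK_STREAM )
    client.connect ( ( 'localhost', 2727 ) )
    print 'Received list:', pickle.loads ( client.recv ( 1024 ) )
    for x in xrange ( 10 ):
        client.send ( 'message %d' % x )
    client.close()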
| {
"content_hash": "307ddd3680bf3dd093730786e9d129bf",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 71,
"avg_line_length": 26.72222222222222,
"alnum_prop": 0.6486486486486487,
"repo_name": "vmiklos/vmexam",
"id": "d3e6189fe83b5d2f6d10b49daea7ec41661f5087",
"size": "962",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/server/server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "1356"
},
{
"name": "C",
"bytes": "207141"
},
{
"name": "C#",
"bytes": "6115"
},
{
"name": "C++",
"bytes": "174284"
},
{
"name": "CMake",
"bytes": "90430"
},
{
"name": "Go",
"bytes": "13344"
},
{
"name": "HTML",
"bytes": "7421"
},
{
"name": "Java",
"bytes": "33479"
},
{
"name": "JavaScript",
"bytes": "15830"
},
{
"name": "JetBrains MPS",
"bytes": "93"
},
{
"name": "Kotlin",
"bytes": "12619"
},
{
"name": "M4",
"bytes": "4410"
},
{
"name": "Makefile",
"bytes": "133045"
},
{
"name": "Objective-C",
"bytes": "6102"
},
{
"name": "PDDL",
"bytes": "2562"
},
{
"name": "PHP",
"bytes": "10859"
},
{
"name": "Perl",
"bytes": "566936"
},
{
"name": "PowerShell",
"bytes": "618"
},
{
"name": "Python",
"bytes": "185940"
},
{
"name": "Rust",
"bytes": "40567"
},
{
"name": "Shell",
"bytes": "74062"
},
{
"name": "TypeScript",
"bytes": "45072"
},
{
"name": "VBA",
"bytes": "3117"
},
{
"name": "Vim Script",
"bytes": "1105"
},
{
"name": "XSLT",
"bytes": "281"
}
],
"symlink_target": ""
} |
import os
import sys
reload(sys)
sys.setdefaultencoding('utf8')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")
from django.core.handlers.wsgi import WSGIHandler
application = WSGIHandler()
| {
"content_hash": "bc34b2f78741be09840b4cdf0c7d2ab2",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 66,
"avg_line_length": 21.2,
"alnum_prop": 0.7971698113207547,
"repo_name": "Adward-R/Info-Retrieval-Project",
"id": "7ebbe87afdb49fe4649a21aaabb62c33bd08093a",
"size": "235",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "djcode/mysite/django_wsgi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2171"
},
{
"name": "HTML",
"bytes": "97108"
},
{
"name": "JavaScript",
"bytes": "344399"
},
{
"name": "Python",
"bytes": "26595"
}
],
"symlink_target": ""
} |
import os
import shutil
TEST_RESULTS = "testresults"
def setup(self):
if os.path.exists(TEST_RESULTS):
shutil.rmtree(TEST_RESULTS)
if not os.path.exists(TEST_RESULTS):
os.makedirs(TEST_RESULTS)
| {
"content_hash": "a74df24c1c84f8dd0b407fe8e2c33501",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 40,
"avg_line_length": 20.09090909090909,
"alnum_prop": 0.6832579185520362,
"repo_name": "murrayo/yape",
"id": "5f84f5dd7e1be7f6f18e5833ec0e468b75328356",
"size": "221",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "yape/tests/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "353"
},
{
"name": "Dockerfile",
"bytes": "1131"
},
{
"name": "HTML",
"bytes": "307"
},
{
"name": "Python",
"bytes": "92350"
},
{
"name": "Shell",
"bytes": "130"
}
],
"symlink_target": ""
} |
'''
Harness Toolset
Copyright (c) 2015 Rich Kelley
Contact:
@RGKelley5
RK5DEVMAIL[A T]gmail[D O T]com
www.frogstarworldc.com
License: MIT
'''
import threading
import builtins
import sys
from random import randint
from harness.core import framework
from harness.core import threads
from collections import namedtuple
from queue import Queue
class ModuleFrame(framework.Framework):
def __init__(self, about):
# -----------------------------------------------------
# Thread Events must be initialized before framework
# due to print function thread controls in ModuleFrame
# -----------------------------------------------------
self.stopper = threading.Event()
self.stopper.clear()
self.allow_print = threading.Event()
self.allow_print.isSet()
self.stdin_q = Queue()
self.FORCE_THREAD = False
# -----------------------------------------------------
framework.Framework.__init__(self)
self.prompt = "H_MOD(" + about["name"] + ") "
self.thread_to_return = None
self.module_id = randint(1, 100000)
# TODO: add exception handling for undeclared keys
self.name = about['name']
self.author = about['author']
self.info = about['info']
self.contact = about['contact']
self.version = about['version']
def isrunning(self):
if self.stopper.isSet():
return False
return True
def print(self, *objects, sep=' ', end='\n', file=sys.stdout, flush=False):
if self.allow_print.isSet():
return builtins.print(*objects, sep=sep, end=end, file=file, flush=flush)
def print_error(self, outstr):
if self.allow_print.isSet():
framework.Framework.print_error(self, outstr)
def print_output(self, outstr):
if self.allow_print.isSet():
framework.Framework.print_output(self, outstr)
def print_debug(self, outstr):
if self.allow_print.isSet():
framework.Framework.print_debug(self, outstr)
def add_session(self, remote_conn_info=None, local_conn_info=None, stype=None):
return framework.Framework.add_session(self, remote_conn_info=remote_conn_info, local_conn_info=local_conn_info, id=self.module_id, stype=stype)
def go(self, _globals):
self.framework_globals = _globals
self.cmdloop()
return self.thread_to_return, self.framework_globals # Return thread back to base for management
def do_back(self, args=None):
return True
def do_run(self, args=None):
if args:
_args = framework.parse_args(args)
else:
_args = (" ")
self.stopper.clear()
self.allow_print.set()
# Wrap the module in a Thread object and return to base
if self.FORCE_THREAD or _args[0].lower() in ('job', 'thread', 'j', 't'):
if self.FORCE_THREAD:
self.print_output("Module must be run in background!")
self.allow_print.clear()
t = threads.ModuleThread(target=self, args=[self.stopper, self.allow_print, self.module_id, self.stdin_q])
t.daemon = True
self.thread_to_return = t
return True
else:
# Normal run in foreground
try:
self.run_module()
# Exit the module cleanly without exiting framework
except KeyboardInterrupt:
pass
finally:
self.cleanup_exit()
def show_info(self, args=None):
print("\n\tModule Name: ", self.name)
print("\tAuthors: ", self.author)
print("\tContact: ", self.contact)
print("\tInfo: ", self.info)
print("\tVersion: ", self.version)
print()
def pre_run(self, args=None):
pass
def run_module(self, args=None):
pass
def post_run(self, args=None):
pass
def cleanup_exit(self):
self.print_debug("Cleaning up...")
self.stopper.clear()
self.post_run()
self.allow_print.clear()
self.print_output("Exiting module...")
return True
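# --- Illustrative sketch (not part of the original file) ---
# A hedged, minimal example of how a module might subclass ModuleFrame.
# The "about" values and the body of run_module are assumptions made purely
# for illustration; a real module would also implement pre_run/post_run as
# needed.
class ExampleModule(ModuleFrame):

    def __init__(self):
        about = {
            'name': 'example',
            'author': 'anonymous',
            'info': 'Minimal demonstration module',
            'contact': 'n/a',
            'version': '0.1',
        }
        ModuleFrame.__init__(self, about)

    def run_module(self, args=None):
        # A single pass; a long-running module would loop while
        # self.isrunning() so the framework can stop it cleanly.
        self.print_output("example module ran")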
| {
"content_hash": "3258bacc9c864c2810e5dabb3b3ac795",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 146,
"avg_line_length": 20.685393258426966,
"alnum_prop": 0.6531776208582292,
"repo_name": "liorvh/Harness",
"id": "fe6f8561f25ead57e0b4c0036d61dea30e5c9181",
"size": "3682",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "harness/core/module.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "246928"
},
{
"name": "C#",
"bytes": "26596"
},
{
"name": "C++",
"bytes": "8995"
},
{
"name": "Objective-C",
"bytes": "403"
},
{
"name": "Python",
"bytes": "553362"
},
{
"name": "Shell",
"bytes": "210"
}
],
"symlink_target": ""
} |
"""Common NeRF loss functions.
Loss functions defined in this module return a scalar weight in addition to the
raw loss value. This enables both logging of the raw values as well as weight
scheduling defined by the caller.
"""
from typing import Callable, Optional, Tuple
from etils.array_types import FloatArray
import gin
import jax
import jax.numpy as jnp
def _get_norm_fn(name: str) -> Callable[[jnp.ndarray], jnp.ndarray]:
"""Maps the name of a reconstruction norm to a function."""
if name.lower() == "l1":
return jnp.abs
elif name.lower() == "l2":
return lambda a: a**2
else:
raise ValueError(f"Unknown norm function {name}.")
@gin.configurable(allowlist=["low_threshold", "high_threshold"])
def tri_mode_clipping(ground_truth: FloatArray[..., "C"],
predicted: FloatArray[..., "C"],
low_threshold: float = 0.0,
high_threshold: float = 1.0) -> FloatArray[..., "C"]:
"""An error clipping scheme for data saturated outside a given range.
For ground truth pixels outside this range, predicted pixels only affect the
loss if they are higher than the low ground truth pixel, or lower than the
high ground truth pixel. In this case, the predicted values are compared to
the threshold rather than the pixel value.
Args:
ground_truth: The ground truth RGB pixel values to be reconstructed.
predicted: Estimated RGB pixel values to be evaluated.
low_threshold: The lower edge of the range.
high_threshold: The upper end of the range.
Returns:
The clipped error value.
"""
assert low_threshold < high_threshold
# If groundtruth is above the high limit, only penalize predictions below it.
gt_high_mask = ground_truth > high_threshold
gt_high_error = jnp.maximum(high_threshold - predicted,
0.0)
gt_high_error *= gt_high_mask.astype(jnp.float32)
# If groundtruth is below the low limit only penalize predictions above it.
gt_low_mask = ground_truth < low_threshold
gt_low_error = jnp.minimum(low_threshold - predicted, 0.0)
gt_low_error *= gt_low_mask.astype(jnp.float32)
# Normal loss for in-range groundtruth.
in_range_mask = jnp.invert(gt_high_mask ^ gt_low_mask)
in_range_error = ground_truth - predicted
in_range_error *= in_range_mask.astype(jnp.float32)
return gt_high_error + gt_low_error + in_range_error
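# --- Illustrative sketch (not part of the original module) ---
# A hedged check of tri_mode_clipping with the default thresholds (0.0, 1.0).
# The pixel values are assumptions chosen purely for illustration.
def _tri_mode_clipping_example():
  ground_truth = jnp.array([1.5, 1.5, 0.5])  # first two saturated above 1.0
  predicted = jnp.array([1.2, 0.7, 0.7])
  # Expected per-element errors:
  #   1.5 vs 1.2 -> 0.0   (prediction already above the high threshold)
  #   1.5 vs 0.7 -> 0.3   (penalized only up to the threshold: 1.0 - 0.7)
  #   0.5 vs 0.7 -> -0.2  (in range: plain ground_truth - predicted)
  return tri_mode_clipping(ground_truth, predicted)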
@gin.configurable(
"reconstruction_loss", allowlist=["weight", "norm", "use_tri_mode"])
def reconstruction(ground_truth: FloatArray[..., "C"],
predicted: FloatArray[..., "C"],
mask: Optional[FloatArray[...]] = None,
weight: float = 1.0,
norm: str = "l2",
use_tri_mode: bool = False) -> Tuple[FloatArray, float]:
"""A photometric reconstruction loss.
Args:
ground_truth: The ground truth RGB pixel values to be reconstructed.
predicted: Estimated RGB pixel values to be evaluated.
mask: Optional per-pixel weights for masking the contribution of certain
pixels to the loss.
weight: The scalar weight controlling the strength of the loss.
norm: Either 'l1' or 'l2' to set the reconstruction norm to be used.
use_tri_mode: If true, use tri-mode clipping on the error values.
Returns:
loss: The scalar loss value with no weight applied.
weight: The scalar weight controlling the strength of the loss.
"""
if use_tri_mode:
error = tri_mode_clipping(ground_truth, predicted)
else:
error = ground_truth - predicted
normed_error = _get_norm_fn(norm)(error)
if mask is not None:
normed_error *= mask
loss = jnp.mean(normed_error)
return loss, weight
@gin.configurable(
"normal_consistency_loss",
allowlist=["weight", "mode", "hold_analytic_normals_constant"])
def normal_consistency(analytic_normals: FloatArray[..., 3],
predicted_normals: FloatArray[..., 3],
mask: Optional[FloatArray[...]] = None,
weight: float = 0.0,
hold_analytic_normals_constant: bool = True,
mode: str = "error") -> Tuple[FloatArray, float]:
"""Loss for enforcing consistency between predicted and analytic normals.
Args:
analytic_normals: Normal vectors derived from a density field.
predicted_normals: Directly predicted normals to be supervised.
mask: Optional per-pixel weights for masking the contribution of certain
pixels to the loss.
weight: The scalar weight controlling the strength of the loss.
hold_analytic_normals_constant: If true, treat analytic normals as fixed
input instead of learnable output by applying stop_gradient.
mode: Either 'error' or 'cosine' to control whether L2 or cosine
distance is penalized.
Returns:
loss: The scalar loss value with no weight applied.
weight: The scalar weight controlling the strength of the loss.
"""
if hold_analytic_normals_constant:
analytic_normals = jax.lax.stop_gradient(analytic_normals)
if mode == "error":
error = (analytic_normals - predicted_normals)**2
elif mode == "cosine":
error = 1.0 - jnp.sum(analytic_normals * predicted_normals, axis=-1)
else:
raise ValueError(f"Unknown normal consistency loss mode {mode}.")
if mask is not None:
error *= mask
loss = jnp.mean(error)
return loss, weight
@gin.configurable("color_correction_regularization", allowlist=["weight"])
def color_correction_regularization(error: FloatArray[...],
weight: float = 0.0
) -> Tuple[FloatArray, float]:
"""Color correction regularization.
Args:
error: Color correction error values to be penalized.
weight: The scalar weight controlling the strength of the loss.
Returns:
loss: The scalar value with no weight applied.
weight: The scalar weight controlling the strength of the loss.
"""
return jnp.mean(error), weight
@gin.configurable("hard_surface_loss", allowlist=["weight"])
def hard_surface(sample_weights: FloatArray[...],
weight: float = 0.0) -> Tuple[FloatArray, float]:
"""Hard surface density regularizer loss.
Args:
sample_weights: Per-sample contribution weights from volume rendering.
weight: The scalar weight controlling the strength of the loss.
Returns:
loss: The scalar loss value with no weight applied.
weight: The scalar weight controlling the strength of the loss.
"""
loss = jnp.mean(-jnp.log(
jnp.exp(-jnp.abs(sample_weights)) +
jnp.exp(-jnp.abs(1.0 - sample_weights))))
return loss, weight
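# --- Illustrative sketch (not part of the original module) ---
# A hedged example of the (raw loss, weight) convention described in the
# module docstring: the caller can log the raw values and apply the scalar
# weights itself. The input shapes and values are illustrative assumptions.
def _total_loss_example():
  ground_truth = jnp.full((4, 3), 0.5)
  predicted = jnp.full((4, 3), 0.4)
  sample_weights = jnp.full((4, 8), 0.1)
  terms = {
      "reconstruction": reconstruction(ground_truth, predicted),
      "hard_surface": hard_surface(sample_weights),
  }
  # Raw values are useful for logging; the weights control each term's
  # contribution to the total objective.
  total = sum(loss * weight for loss, weight in terms.values())
  raw = {name: loss for name, (loss, _) in terms.items()}
  return total, raw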
| {
"content_hash": "5979243cf28ad046bf7ac96290591376",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 79,
"avg_line_length": 37.357541899441344,
"alnum_prop": 0.6720502467474204,
"repo_name": "google-research/jax3d",
"id": "6d78fe0fd4a5f62519e9871f9f612b49476cb1e7",
"size": "7270",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "jax3d/projects/generative/nerf/losses.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "47972"
},
{
"name": "Python",
"bytes": "1239944"
}
],
"symlink_target": ""
} |
import base64
import logging
import socket
from typing import Any, Optional
import binascii
from ntlm_auth import gss_channel_bindings, ntlm
from mitmproxy import addonmanager, http
from mitmproxy import ctx
from mitmproxy.net.http import http1
from mitmproxy.proxy import commands, layer
from mitmproxy.proxy.context import Context
from mitmproxy.proxy.layers.http import HttpConnectUpstreamHook, HttpLayer, HttpStream
from mitmproxy.proxy.layers.http._upstream_proxy import HttpUpstreamProxy
class NTLMUpstreamAuth:
"""
This addon handles authentication to systems upstream from us for the
upstream proxy and reverse proxy mode. There are 3 cases:
- Upstream proxy CONNECT requests should have authentication added, and
subsequent already connected requests should not.
- Upstream proxy regular requests
- Reverse proxy regular requests (CONNECT is invalid in this mode)
"""
def load(self, loader: addonmanager.Loader) -> None:
logging.info("NTLMUpstreamAuth loader")
loader.add_option(
name="upstream_ntlm_auth",
typespec=Optional[str],
default=None,
help="""
Add HTTP NTLM authentication to upstream proxy requests.
Format: username:password.
"""
)
loader.add_option(
name="upstream_ntlm_domain",
typespec=Optional[str],
default=None,
help="""
Add HTTP NTLM domain for authentication to upstream proxy requests.
"""
)
loader.add_option(
name="upstream_proxy_address",
typespec=Optional[str],
default=None,
help="""
Upstream proxy address.
"""
)
loader.add_option(
name="upstream_ntlm_compatibility",
typespec=int,
default=3,
help="""
Add HTTP NTLM compatibility for authentication to upstream proxy requests.
Valid values are 0-5 (Default: 3)
"""
)
logging.debug("AddOn: NTLM Upstream Authentication - Loaded")
def running(self):
def extract_flow_from_context(context: Context) -> http.HTTPFlow:
if context and context.layers:
for l in context.layers:
if isinstance(l, HttpLayer):
for _, stream in l.streams.items():
return stream.flow if isinstance(stream, HttpStream) else None
def build_connect_flow(context: Context, connect_header: tuple) -> http.HTTPFlow:
flow = extract_flow_from_context(context)
if not flow:
logging.error("failed to build connect flow")
raise
flow.request.content = b"" # we should send empty content for handshake
header_name, header_value = connect_header
flow.request.headers.add(header_name, header_value)
return flow
def patched_start_handshake(self) -> layer.CommandGenerator[None]:
assert self.conn.address
self.ntlm_context = CustomNTLMContext(ctx)
proxy_authorization = self.ntlm_context.get_ntlm_start_negotiate_message()
self.flow = build_connect_flow(self.context, ("Proxy-Authorization", proxy_authorization))
yield HttpConnectUpstreamHook(self.flow)
raw = http1.assemble_request(self.flow.request)
yield commands.SendData(self.tunnel_connection, raw)
def extract_proxy_authenticate_msg(response_head: list) -> str:
for header in response_head:
if b'Proxy-Authenticate' in header:
challenge_message = str(bytes(header).decode('utf-8'))
try:
token = challenge_message.split(': ')[1]
except IndexError:
logging.error("Failed to extract challenge_message")
raise
return token
def patched_receive_handshake_data(self, data) -> layer.CommandGenerator[tuple[bool, Optional[str]]]:
self.buf += data
response_head = self.buf.maybe_extract_lines()
if response_head:
response_head = [bytes(x) for x in response_head]
try:
response = http1.read_response_head(response_head)
except ValueError:
return True, None
challenge_message = extract_proxy_authenticate_msg(response_head)
if 200 <= response.status_code < 300:
if self.buf:
yield from self.receive_data(data)
del self.buf
return True, None
else:
if not challenge_message:
return True, None
proxy_authorization = self.ntlm_context.get_ntlm_challenge_response_message(challenge_message)
self.flow = build_connect_flow(self.context, ("Proxy-Authorization", proxy_authorization))
raw = http1.assemble_request(self.flow.request)
yield commands.SendData(self.tunnel_connection, raw)
return False, None
else:
return False, None
HttpUpstreamProxy.start_handshake = patched_start_handshake
HttpUpstreamProxy.receive_handshake_data = patched_receive_handshake_data
def done(self):
logging.info('close ntlm session')
addons = [
NTLMUpstreamAuth()
]
class CustomNTLMContext:
def __init__(self,
ctx,
preferred_type: str = 'NTLM',
cbt_data: gss_channel_bindings.GssChannelBindingsStruct = None):
# TODO:// take care the cbt_data
auth: str = ctx.options.upstream_ntlm_auth
domain: str = str(ctx.options.upstream_ntlm_domain).upper()
ntlm_compatibility: int = ctx.options.upstream_ntlm_compatibility
username, password = tuple(auth.split(":"))
workstation = socket.gethostname().upper()
logging.debug(f'\nntlm context with the details: "{domain}\\{username}", *****')
self.preferred_type = preferred_type
self.ntlm_context = ntlm.NtlmContext(
username=username,
password=password,
domain=domain,
workstation=workstation,
ntlm_compatibility=ntlm_compatibility,
cbt_data=cbt_data)
def get_ntlm_start_negotiate_message(self) -> str:
negotiate_message = self.ntlm_context.step()
negotiate_message_base_64_in_bytes = base64.b64encode(negotiate_message)
negotiate_message_base_64_ascii = negotiate_message_base_64_in_bytes.decode("ascii")
negotiate_message_base_64_final = f'{self.preferred_type} {negotiate_message_base_64_ascii}'
logging.debug(
f'{self.preferred_type} Authentication, negotiate message: {negotiate_message_base_64_final}'
)
return negotiate_message_base_64_final
def get_ntlm_challenge_response_message(self, challenge_message: str) -> Any:
challenge_message = challenge_message.replace(self.preferred_type + " ", "", 1)
try:
challenge_message_ascii_bytes = base64.b64decode(challenge_message, validate=True)
except binascii.Error as err:
logging.debug(f'{self.preferred_type} Authentication fail with error {err.__str__()}')
return False
authenticate_message = self.ntlm_context.step(challenge_message_ascii_bytes)
negotiate_message_base_64 = '{} {}'.format(self.preferred_type,
base64.b64encode(authenticate_message).decode('ascii'))
logging.debug(
f'{self.preferred_type} Authentication, response to challenge message: {negotiate_message_base_64}'
)
return negotiate_message_base_64
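# --- Illustrative sketch (not part of the original addon) ---
# A hedged example of the first leg of the handshake the patched methods
# perform: building the Type 1 (negotiate) token that start_handshake puts
# in the Proxy-Authorization header. The stub ctx and the credentials are
# placeholders; in mitmproxy the real ctx.options would be used.
def _ntlm_negotiate_example():
    class _Options:
        upstream_ntlm_auth = "user:password"
        upstream_ntlm_domain = "EXAMPLE"
        upstream_ntlm_compatibility = 3

    class _Ctx:
        options = _Options()

    ntlm_ctx = CustomNTLMContext(_Ctx())
    # The returned string looks like "NTLM <base64 negotiate message>"; the
    # challenge response would be produced later from the 407 reply via
    # get_ntlm_challenge_response_message().
    return ntlm_ctx.get_ntlm_start_negotiate_message()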
| {
"content_hash": "a49c26677d98676bf072ab22cf631f4a",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 114,
"avg_line_length": 43.3494623655914,
"alnum_prop": 0.6008929678779611,
"repo_name": "mhils/mitmproxy",
"id": "656d48b3ad67ddd215f6e15db04d033339a716f3",
"size": "8063",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "examples/contrib/ntlm_upstream_proxy.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3618"
},
{
"name": "Dockerfile",
"bytes": "618"
},
{
"name": "HTML",
"bytes": "10672"
},
{
"name": "JavaScript",
"bytes": "134086"
},
{
"name": "Kaitai Struct",
"bytes": "3670"
},
{
"name": "Less",
"bytes": "21203"
},
{
"name": "PowerShell",
"bytes": "258"
},
{
"name": "Python",
"bytes": "2367991"
},
{
"name": "Shell",
"bytes": "3055"
},
{
"name": "TypeScript",
"bytes": "279053"
}
],
"symlink_target": ""
} |
"""
This module contains the implementation of SSHSession, which (by default)
allows access to a shell and a python interpreter over SSH.
Maintainer: Paul Swartz
"""
import struct
from twisted.internet import protocol
from twisted.python import log
from twisted.conch.interfaces import ISession
from twisted.conch.ssh import common, channel
class SSHSession(channel.SSHChannel):
name = 'session'
def __init__(self, *args, **kw):
channel.SSHChannel.__init__(self, *args, **kw)
self.buf = ''
self.client = None
self.session = None
def request_subsystem(self, data):
subsystem, ignored= common.getNS(data)
log.msg('asking for subsystem "%s"' % subsystem)
client = self.avatar.lookupSubsystem(subsystem, data)
if client:
pp = SSHSessionProcessProtocol(self)
proto = wrapProcessProtocol(pp)
client.makeConnection(proto)
pp.makeConnection(wrapProtocol(client))
self.client = pp
return 1
else:
log.msg('failed to get subsystem')
return 0
def request_shell(self, data):
log.msg('getting shell')
if not self.session:
self.session = ISession(self.avatar)
try:
pp = SSHSessionProcessProtocol(self)
self.session.openShell(pp)
except:
log.deferr()
return 0
else:
self.client = pp
return 1
def request_exec(self, data):
if not self.session:
self.session = ISession(self.avatar)
f,data = common.getNS(data)
log.msg('executing command "%s"' % f)
try:
pp = SSHSessionProcessProtocol(self)
self.session.execCommand(pp, f)
except:
log.deferr()
return 0
else:
self.client = pp
return 1
def request_pty_req(self, data):
if not self.session:
self.session = ISession(self.avatar)
term, windowSize, modes = parseRequest_pty_req(data)
log.msg('pty request: %s %s' % (term, windowSize))
try:
self.session.getPty(term, windowSize, modes)
except:
log.err()
return 0
else:
return 1
def request_window_change(self, data):
if not self.session:
self.session = ISession(self.avatar)
winSize = parseRequest_window_change(data)
try:
self.session.windowChanged(winSize)
except:
log.msg('error changing window size')
log.err()
return 0
else:
return 1
def dataReceived(self, data):
if not self.client:
#self.conn.sendClose(self)
self.buf += data
return
self.client.transport.write(data)
def extReceived(self, dataType, data):
if dataType == connection.EXTENDED_DATA_STDERR:
if self.client and hasattr(self.client.transport, 'writeErr'):
self.client.transport.writeErr(data)
else:
log.msg('weird extended data: %s'%dataType)
def eofReceived(self):
if self.session:
self.session.eofReceived()
elif self.client:
self.conn.sendClose(self)
def closed(self):
if self.session:
self.session.closed()
elif self.client:
self.client.transport.loseConnection()
#def closeReceived(self):
# self.loseConnection() # don't know what to do with this
def loseConnection(self):
if self.client:
self.client.transport.loseConnection()
channel.SSHChannel.loseConnection(self)
class _ProtocolWrapper(protocol.ProcessProtocol):
"""
This class wraps a L{Protocol} instance in a L{ProcessProtocol} instance.
"""
def __init__(self, proto):
self.proto = proto
def connectionMade(self): self.proto.connectionMade()
def outReceived(self, data): self.proto.dataReceived(data)
def processEnded(self, reason): self.proto.connectionLost(reason)
class _DummyTransport:
def __init__(self, proto):
self.proto = proto
def dataReceived(self, data):
self.proto.transport.write(data)
def write(self, data):
self.proto.dataReceived(data)
def writeSequence(self, seq):
self.write(''.join(seq))
def loseConnection(self):
self.proto.connectionLost(protocol.connectionDone)
def wrapProcessProtocol(inst):
if isinstance(inst, protocol.Protocol):
return _ProtocolWrapper(inst)
else:
return inst
def wrapProtocol(proto):
return _DummyTransport(proto)
class SSHSessionProcessProtocol(protocol.ProcessProtocol):
# __implements__ = I
def __init__(self, session):
self.session = session
def connectionMade(self):
if self.session.buf:
self.transport.write(self.session.buf)
self.session.buf = None
def outReceived(self, data):
self.session.write(data)
def errReceived(self, err):
self.session.writeExtended(connection.EXTENDED_DATA_STDERR, err)
def inConnectionLost(self):
self.session.conn.sendEOF(self.session)
def connectionLost(self, reason = None):
self.session.loseConnection()
def processEnded(self, reason = None):
if reason and hasattr(reason.value, 'exitCode'):
log.msg('exitCode: %s' % repr(reason.value.exitCode))
self.session.conn.sendRequest(self.session, 'exit-status', struct.pack('!L', reason.value.exitCode))
self.session.loseConnection()
# transport stuff (we are also a transport!)
def write(self, data):
self.session.write(data)
def writeSequence(self, seq):
self.session.write(''.join(seq))
def loseConnection(self):
self.session.loseConnection()
class SSHSessionClient(protocol.Protocol):
def dataReceived(self, data):
if self.transport:
self.transport.write(data)
# methods factored out to make live easier on server writers
def parseRequest_pty_req(data):
"""Parse the data from a pty-req request into usable data.
@returns: a tuple of (terminal type, (rows, cols, xpixel, ypixel), modes)
"""
term, rest = common.getNS(data)
cols, rows, xpixel, ypixel = struct.unpack('>4L', rest[: 16])
modes, ignored= common.getNS(rest[16:])
winSize = (rows, cols, xpixel, ypixel)
modes = [(ord(modes[i]), struct.unpack('>L', modes[i+1: i+5])[0]) for i in range(0, len(modes)-1, 5)]
return term, winSize, modes
def packRequest_pty_req(term, (rows, cols, xpixel, ypixel), modes):
"""Pack a pty-req request so that it is suitable for sending.
NOTE: modes must be packed before being sent here.
"""
termPacked = common.NS(term)
winSizePacked = struct.pack('>4L', cols, rows, xpixel, ypixel)
modesPacked = common.NS(modes) # depend on the client packing modes
return termPacked + winSizePacked + modesPacked
def parseRequest_window_change(data):
"""Parse the data from a window-change request into usuable data.
@returns: a tuple of (rows, cols, xpixel, ypixel)
"""
cols, rows, xpixel, ypixel = struct.unpack('>4L', data)
return rows, cols, xpixel, ypixel
def packRequest_window_change((rows, cols, xpixel, ypixel)):
"""Pack a window-change request so that it is suitable for sending.
"""
return struct.pack('>4L', cols, rows, xpixel, ypixel)
import connection
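# --- Illustrative sketch (not part of the original module) ---
# A hedged round trip through the request helpers above: pack a
# window-change payload and parse it back. The 24x80 geometry with zero
# pixel sizes is an arbitrary value chosen for illustration.
def _windowChangeRoundTripExample():
    packed = packRequest_window_change((24, 80, 0, 0))
    return parseRequest_window_change(packed) == (24, 80, 0, 0)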
| {
"content_hash": "c45fcfabd97af4961c52b36c3bbf07e4",
"timestamp": "",
"source": "github",
"line_count": 249,
"max_line_length": 112,
"avg_line_length": 30.40562248995984,
"alnum_prop": 0.6219786025624092,
"repo_name": "hortonworks/hortonworks-sandbox",
"id": "246fd189df2104ad5cb0e1d040e99e23af2a881e",
"size": "7714",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/Twisted/twisted/conch/ssh/session.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "27264"
},
{
"name": "Assembly",
"bytes": "207947"
},
{
"name": "C",
"bytes": "10279874"
},
{
"name": "C++",
"bytes": "208068"
},
{
"name": "CSS",
"bytes": "356769"
},
{
"name": "Emacs Lisp",
"bytes": "3171"
},
{
"name": "Java",
"bytes": "3064179"
},
{
"name": "JavaScript",
"bytes": "1532806"
},
{
"name": "PHP",
"bytes": "4160"
},
{
"name": "Perl",
"bytes": "139518"
},
{
"name": "Python",
"bytes": "27735073"
},
{
"name": "R",
"bytes": "12290"
},
{
"name": "Ruby",
"bytes": "5050"
},
{
"name": "Shell",
"bytes": "42062"
},
{
"name": "XSLT",
"bytes": "585"
}
],
"symlink_target": ""
} |
from google.appengine.api import users
import logging
def require_user(controller):
"""
Requires that a user is logged in
"""
if not controller.user:
return (False, "You must be logged in")
return True
def require_admin(controller):
"""
Requires that a user is logged in and that the user is an administrator on the App Engine Application
"""
if not users.is_current_user_admin():
return (False, "You must be an administrator")
return True
def predicate_chain(predicate, chain):
"""
Returns the result of chain if predicate returns True, otherwise returns True.
"""
def inner(*args, **kwargs):
predicate_curried = predicate(*args, **kwargs)
def inner_inner(controller):
if predicate_curried(controller):
return chain(controller)
return True
return inner_inner
return inner
def prefix_predicate(prefix):
prefix = prefix if isinstance(prefix, (list, tuple)) else (prefix,)
def inner(controller):
if controller.route.prefix in prefix:
return True
return False
return inner
def action_predicate(action):
action = action if isinstance(action, (list, tuple)) else (action,)
def inner(controller):
if controller.route.action in action:
return True
return False
return inner
def route_predicate(route):
route = route if isinstance(route, (list, tuple)) else (route,)
def inner(controller):
if controller.route.name in route:
return True
return False
return inner
require_user_for_prefix = predicate_chain(prefix_predicate, require_user)
require_admin_for_prefix = predicate_chain(prefix_predicate, require_admin)
require_user_for_action = predicate_chain(action_predicate, require_user)
require_admin_for_action = predicate_chain(action_predicate, require_admin)
require_user_for_route = predicate_chain(route_predicate, require_user)
require_admin_for_route = predicate_chain(route_predicate, require_admin)
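# --- Illustrative sketch (not part of the original module) ---
# A hedged demonstration of the predicate_chain composition above using a
# stub controller. The 'admin' prefix and the fake route/user attributes
# are assumptions; in Ferris the controller is supplied by the framework.
def _predicate_chain_example():
    class _Route(object):
        prefix = 'admin'

    class _Controller(object):
        route = _Route()
        user = None  # nobody is logged in

    check = require_user_for_prefix('admin')
    # The prefix matches, so the chained require_user runs and fails:
    return check(_Controller())  # -> (False, "You must be logged in")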
| {
"content_hash": "602d73e3a722d5f8a8c7f7494ecf6255",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 106,
"avg_line_length": 27.407894736842106,
"alnum_prop": 0.6764282285165627,
"repo_name": "yowmamasita/social-listener-exam",
"id": "f6376ecba59cb91689636d0bfc065da3eea2b66c",
"size": "2083",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "ferris/core/auth.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "2510"
},
{
"name": "Python",
"bytes": "197882"
},
{
"name": "Shell",
"bytes": "2268"
}
],
"symlink_target": ""
} |
""" S3 Synchronization: Peer Repository Adapter
@copyright: 2011-15 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import datetime
import glob
import os
import sys
try:
from lxml import etree
except ImportError:
print >> sys.stderr, "ERROR: lxml module needed for XML handling"
raise
from gluon import *
from ..s3sync import S3SyncBaseAdapter
# =============================================================================
class S3SyncAdapter(S3SyncBaseAdapter):
"""
File Synchronization Adapter
"""
def register(self):
"""
Register this site at the peer repository
@return: True to indicate success, otherwise False
"""
# No registration needed
return True
# -------------------------------------------------------------------------
def login(self):
"""
Login at the peer repository
@return: None if successful, otherwise the error
"""
# No explicit login required
return None
# -------------------------------------------------------------------------
def pull(self, task, onconflict=None):
"""
Fetch updates from the peer repository and import them
into the local database (active pull)
@param task: the synchronization task (sync_task Row)
@param onconflict: callback for automatic conflict resolution
@return: tuple (error, mtime), with error=None if successful,
else error=message, and mtime=modification timestamp
of the youngest record sent
"""
repository = self.repository
log = repository.log
error = None
result = None
# Instantiate the target resource
tablename = task.resource_name
if tablename == "mixed":
resource = None
mixed = True
else:
try:
resource = current.s3db.resource(tablename)
except SyntaxError:
result = log.FATAL
error = msg = sys.exc_info()[1]
mixed = False
# Get input files
if not result:
input_files = self._input_files(task)
if not input_files:
result = log.SUCCESS
msg = "No files to import"
# Instantiate back-end
if not result:
adapter = None
backend = repository.backend
if not backend:
backend = "eden"
backend = "s3.sync_adapter.%s" % backend
try:
name = "S3SyncAdapter"
api = getattr(__import__(backend, fromlist=[name]), name)
except ImportError:
result = log.FATAL
error = msg = "Unsupported back-end: %s" % backend
else:
adapter = api(repository)
# If any of the previous actions has produced a non-default result:
if result:
# Log the operation and return
log.write(repository_id = repository.id,
resource_name = tablename,
transmission = log.OUT,
mode = log.PULL,
action = None,
remote = False,
result = result,
message = msg,
)
return (error, None)
# Set strategy and policies
from ..s3import import S3ImportItem
strategy = task.strategy
conflict_policy = task.conflict_policy
if not conflict_policy:
conflict_policy = S3ImportItem.POLICY.MASTER
update_policy = task.update_policy
if not update_policy:
update_policy = S3ImportItem.POLICY.NEWER
if update_policy not in ("THIS", "OTHER"):
last_sync = task.last_pull
else:
last_sync = None
# Import the files
error = None
mtime = None
for f in input_files:
current.log.debug("FileSync: importing %s" % f)
try:
with open(f, "r") as source:
result = adapter.receive([source],
resource,
strategy = strategy,
update_policy = update_policy,
conflict_policy = conflict_policy,
onconflict = onconflict,
last_sync = last_sync,
mixed = mixed,
)
except IOError:
msg = sys.exc_info()[1]
current.log.warning(msg)
continue
status = result["status"]
# Log the operation
log.write(repository_id = repository.id,
resource_name = tablename,
transmission = log.OUT,
mode = log.PULL,
action = "import %s" % f,
remote = result["remote"],
result = status,
message = result["message"],
)
if status in (log.ERROR, log.FATAL):
error = "Error while importing %s" % f
current.log.error(error)
mtime = None
else:
if resource:
mtime = resource.mtime
else:
mtime = current.request.utcnow
if task.delete_input_files:
try:
os.remove(f)
except os.error:
current.log.warning("FileSync: can not delete %s" % f)
else:
current.log.debug("FileSync: %s deleted" % f)
return error, mtime
# -------------------------------------------------------------------------
def push(self, task):
"""
Extract new updates from the local database and send
them to the peer repository (active push)
@param task: the synchronization task (sync_task Row)
@return: tuple (error, mtime), with error=None if successful,
else error=message, and mtime=modification timestamp
of the youngest record sent
"""
repository = self.repository
log = repository.log
error = None
result = None
# Instantiate the target resource
tablename = task.resource_name
if tablename == "mixed":
resource = None
mixed = True
else:
try:
resource = current.s3db.resource(tablename,
include_deleted=True,
)
except SyntaxError:
result = log.FATAL
error = msg = sys.exc_info()[1]
mixed = False
# Get output file name
if not result:
outfile = self._output_file(task)
if not outfile:
result = log.ERROR
if repository.path:
error = msg = "No pattern configured for output file name"
else:
error = msg = "No file path configured for repository"
# Instantiate the back-end
if not result:
adapter = None
backend = repository.backend
if not backend:
backend = "eden"
backend = "s3.sync_adapter.%s" % backend
try:
name = "S3SyncAdapter"
api = getattr(__import__(backend, fromlist=[name]), name)
except ImportError:
result = log.FATAL
error = msg = "Unsupported back-end: %s" % backend
else:
adapter = api(repository)
# If any of the previous actions has produced a non-default result:
if result:
# Log the operation and return
log.write(repository_id = repository.id,
resource_name = tablename,
transmission = log.OUT,
mode = log.PUSH,
action = None,
remote = False,
result = result,
message = msg,
)
return (error, None)
# Update policy and msince
from ..s3import import S3ImportItem
update_policy = task.update_policy
if not update_policy:
update_policy = S3ImportItem.POLICY.NEWER
if update_policy not in ("THIS", "OTHER"):
msince = task.last_push
else:
msince = None
# Sync filters for this task
filters = current.sync.get_filters(task.id)
# Export the data through the back-end adapter (send)
error = None
mtime = None
action = "data export"
output = adapter.send(resource,
msince = msince,
filters = filters,
mixed = mixed,
pretty_print = task.human_readable,
)
status = output.get("status")
if status in (log.ERROR, log.FATAL):
result = status
msg = output.get("message")
if not msg:
msg = "Error while exporting data"
error = msg
else:
response = output.get("response")
path = repository.path
if not os.path.exists(path):
# Try to create it
try:
os.makedirs(path)
except OSError:
result = log.FATAL
error = msg = sys.exc_info()[1]
if not error:
try:
action = "open %s" % outfile
with open(outfile, "w") as target:
target.write(response)
except IOError:
result = log.FATAL
error = msg = sys.exc_info()[1]
else:
result = log.SUCCESS
msg = "Data successfully written to %s" % outfile
if resource:
msg = "%s (%s records)" % (msg, resource.results)
mtime = resource.muntil
else:
mtime = current.request.utcnow
# Log the operation
log.write(repository_id = repository.id,
resource_name = task.resource_name,
transmission = log.OUT,
mode = log.PUSH,
action = action,
remote = False,
result = result,
message = msg,
)
return (error, mtime)
# -------------------------------------------------------------------------
def send(self,
resource,
start=None,
limit=None,
msince=None,
filters=None,
mixed=False,
pretty_print=False):
"""
Respond to an incoming pull from the peer repository
@param resource: the resource to be synchronized
@param start: index of the first record to send
@param limit: maximum number of records to send
@param msince: minimum modification date/time for records to send
@param filters: URL filters for record extraction
@param mixed: negotiate resource with peer (disregard resource)
@param pretty_print: make the output human-readable
@return: a dict {status, remote, message, response}, with:
- status....the outcome of the operation
- remote....whether the error was remote (or local)
- message...the log message
- response..the response to send to the peer
"""
msg = "Send not supported for this repository type"
return {"status": self.log.FATAL,
"remote": False,
"message": msg,
"response": None,
}
# -------------------------------------------------------------------------
def receive(self,
source,
resource,
strategy=None,
update_policy=None,
conflict_policy=None,
onconflict=None,
last_sync=None,
mixed=False):
"""
Respond to an incoming push from the peer repository
@param source: the input stream (list of file-like objects)
@param resource: the target resource
@param strategy: the import strategy
@param update_policy: the update policy
@param conflict_policy: the conflict resolution policy
@param onconflict: callback for conflict resolution
@param last_sync: the last synchronization date/time for the peer
@param mixed: negotiate resource with peer (disregard resource)
@return: a dict {status, remote, message, response}, with:
- status....the outcome of the operation
- remote....whether the error was remote (or local)
- message...the log message
- response..the response to send to the peer
"""
msg = "Receive not supported for this repository type"
return {"status": self.log.FATAL,
"remote": False,
"message": msg,
"response": None,
}
# -------------------------------------------------------------------------
def _input_files(self, task):
"""
Helper function to get all relevant input files from the
repository path, excluding files which have not been modified
since the last pull of the task
@param task: the synchronization task
@return: a list of file paths, ordered by their time
stamp (oldest first)
"""
path = self.repository.path
if not os.path.isabs(path):
path = os.path.join(current.request.folder, path)
pattern = task.infile_pattern
if path and pattern:
pattern = os.path.join(path, pattern)
else:
return []
all_files = glob.glob(pattern)
infiles = []
append = infiles.append
msince = task.last_pull
for f in filter(os.path.isfile, all_files):
mtime = datetime.datetime.utcfromtimestamp(os.path.getmtime(f))
# Disregard files which have not been modified since the last pull
if msince and mtime <= msince:
continue
append((mtime, f))
# Sort by mtime
infiles.sort(key=lambda item: item[0])
return [item[1] for item in infiles]
# -------------------------------------------------------------------------
def _output_file(self, task):
"""
Helper function to construct the output file name from
the repository path and the output file name pattern
@param task: the synchronization task
@return: the output file name, or None if either
path or pattern are missing
"""
path = self.repository.path
if not os.path.isabs(path):
path = os.path.join(current.request.folder, path)
pattern = task.outfile_pattern
if not path or not pattern:
return None
# Substitute placeholders in pattern
from string import Template
template = Template(pattern).safe_substitute(
year="%(y)04d",
month="%(m)02d",
day="%(d)02d",
hour="%(H)02d",
minute="%(M)02d",
second="%(S)02d",
timestamp="%(y)04d%(m)02d%(d)02d%(H)02d%(M)02d%(S)02d"
)
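# e.g. a (hypothetical) pattern "export_$timestamp.xml" now reads
# "export_%(y)04d%(m)02d%(d)02d%(H)02d%(M)02d%(S)02d.xml" and is filled
# in with the current UTC time below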
# Fill in the template
now = current.request.utcnow
filename = template % {"y": now.year,
"m": now.month,
"d": now.day,
"H": now.hour,
"M": now.minute,
"S": now.second,
}
# Prepend path
outfile = os.path.join(path, filename)
return outfile
# End =========================================================================
| {
"content_hash": "33b4d183e3aa9e46a7bd26842f805c08",
"timestamp": "",
"source": "github",
"line_count": 516,
"max_line_length": 79,
"avg_line_length": 35.38953488372093,
"alnum_prop": 0.4782322983407261,
"repo_name": "sahana/Turkey",
"id": "78f9cb9bd0b93976656af87438ea119cc10d0176",
"size": "18286",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "modules/s3/sync_adapter/filesync.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "727"
},
{
"name": "CSS",
"bytes": "3336714"
},
{
"name": "HTML",
"bytes": "1369269"
},
{
"name": "JavaScript",
"bytes": "20093511"
},
{
"name": "NSIS",
"bytes": "3934"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Python",
"bytes": "31303565"
},
{
"name": "Ruby",
"bytes": "8291"
},
{
"name": "Shell",
"bytes": "5059"
},
{
"name": "XSLT",
"bytes": "3208049"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
result = Intangible()
result.template = "object/draft_schematic/space/booster/shared_booster_mk4.iff"
result.attribute_template_id = -1
result.stfName("string_id_table","")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | {
"content_hash": "d5434d7bdea812dab179827277c314ff",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 80,
"avg_line_length": 23.76923076923077,
"alnum_prop": 0.6957928802588996,
"repo_name": "obi-two/Rebelion",
"id": "6509360fd755a5d28e0e8b593d1e1c78cb90d2fe",
"size": "454",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/draft_schematic/space/booster/shared_booster_mk4.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
} |
"""
Product model base implementation -- you do not need to use this.
It may save you some typing though.
"""
from django.db import models
import plata
class ProductBase(models.Model):
"""
Product models must have two methods to be usable with Plata:
- ``get_price``: Return a price instance
- ``handle_order_item``: Fill in fields on the order item from the product,
i.e. the name and the stock keeping unit.
"""
class Meta:
abstract = True
def get_price(self, currency=None, orderitem=None):
"""
This method is part of the public, required API of products. It returns
either a price instance or raises a ``DoesNotExist`` exception.
If you need more complex pricing schemes, override this method with your
own implementation.
"""
if currency is None:
currency = (orderitem.currency if orderitem else
plata.shop_instance().default_currency())
try:
# Let's hope that ordering=[-id] from the base price definition
# makes any sense here :-)
return self.prices.filter(currency=currency)[0]
except IndexError:
raise self.prices.model.DoesNotExist
def handle_order_item(self, orderitem):
"""
This method has to ensure that the information on the order item is
sufficient for posteriority. Old orders should always be complete
even if the products have been changed or deleted in the meantime.
"""
orderitem.name = unicode(self)
orderitem.sku = getattr(self, 'sku', u'')
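# A minimal sketch of a concrete product model built on this base class
# (hypothetical field and model names; the `prices` relation is assumed to
# point at a price model that exposes a `currency` field):
#
# class Book(ProductBase):
#     name = models.CharField(max_length=100)
#     sku = models.CharField(max_length=30, blank=True)
#
#     def __unicode__(self):
#         return self.name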
| {
"content_hash": "7e6ff0bccac28e425f3a757c1756eb47",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 80,
"avg_line_length": 32.5,
"alnum_prop": 0.64,
"repo_name": "allink/plata",
"id": "b682a351b7685f1140c1a51a6543cd5e1db82f4d",
"size": "1625",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "plata/product/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "383270"
}
],
"symlink_target": ""
} |
from datetime import date
from django.db import models
from django.core.exceptions import ObjectDoesNotExist
from vcms.www.fields import StatusField
class BlogPageManager(models.Manager):
def get_blog_page_from_string(self, page_name):
page = self.get(slug=page_name)
if page :
return page
raise ObjectDoesNotExist
class PublishedBlogPageManager(models.Manager):
def get_blog_page(self, page_name):
page = self.get(slug=page_name)
if page and page.status == StatusField.PUBLISHED :
return page
return None
class BlogPostCategoryManager(models.Manager):
def get_category_for_page(self, page, counts=True):
from vcms.simpleblogs.models import BlogPost
categories_for_page = {}
for category in self.all():
posts = BlogPost.objects.get_all_for_page(page, category=category)
if posts:
categories_for_page[category.name] = {'model': category
,'count' : posts.count()
}
return categories_for_page
class PublishedNewsBlogPostManager(models.Manager):
def get_published(self, queryset=None):
query = self
if queryset:
query = queryset
return query.filter(status=StatusField.PUBLISHED)
def get_unpublished(self):
return self.filter(status=StatusField.DRAFT)
def get_category_for_page(self, page):
query = self.get_for_page(page)
return query
def get_latest(self, queryset=None):
query = self
if queryset:
query = queryset
return query.get_published().order_by("-date_published")
def get_for_page(self, page, queryset=None):
query = self
if queryset:
query = queryset
query = self.get_published(queryset=query)
return query.filter(display_on_page=page)
def get_for_page_by_date(self, page, category=None, year=None, month=1, day=1, post_id=None):
query = self.get_latest()
query = self.get_for_page(page, queryset=query).filter(date_published__year=int(year)).filter(date_published__month=int(month))
if post_id:
return query.filter(id=post_id)
return query
def get_archive_for_page(self, page, category=None, year=None, month=1, day=1):
query = self.get_published()
query = self.get_for_page(page, queryset=query)
return query.filter(date_published__lt=date(year, month, 1))
def get_all_for_page(self, page, category=None):
query = self.get_latest()
query = self.get_for_page(page, queryset=query)
if category:
query = query.filter(category=category)
return query
def get_latest_post_for_page(self, page, qty=1, category=None):
if category:
return self.get_latest().filter(display_on_page=page).filter(category__slug__contains=category.slug)[:qty]
return self.get_latest().filter(display_on_page=page)[:qty]
| {
"content_hash": "55b36fc6efbae4f6dc71272295e57d6e",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 136,
"avg_line_length": 36.48837209302326,
"alnum_prop": 0.6153601019757807,
"repo_name": "francisl/vcms",
"id": "f08292527eee9be77fbfbd952fa0793873ff1504",
"size": "3293",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/vcms/simpleblogs/managers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "1125438"
},
{
"name": "Perl",
"bytes": "696"
},
{
"name": "Python",
"bytes": "197188"
},
{
"name": "Shell",
"bytes": "3588"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import argparse
import calendar
import json
import os
import sys
import time
import uuid
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
import massage_qps_stats
gcp_utils_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../../gcp/utils'))
sys.path.append(gcp_utils_dir)
import big_query_utils
_PROJECT_ID = 'grpc-testing'
def _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, result_file):
with open(result_file, 'r') as f:
(col1, col2, col3) = f.read().split(',')
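# result_file is expected to hold a single comma-separated line with the
# 50th/90th/99th percentile latencies; the values are scaled by 1000 below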
latency50 = float(col1.strip()) * 1000
latency90 = float(col2.strip()) * 1000
latency99 = float(col3.strip()) * 1000
scenario_result = {
'scenario': {
'name': 'netperf_tcp_rr'
},
'summary': {
'latency50': latency50,
'latency90': latency90,
'latency99': latency99
}
}
bq = big_query_utils.create_big_query()
_create_results_table(bq, dataset_id, table_id)
if not _insert_result(
bq, dataset_id, table_id, scenario_result, flatten=False):
print('Error uploading result to bigquery.')
sys.exit(1)
def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file,
metadata_file, node_info_file,
prometheus_query_results_file):
with open(result_file, 'r') as f:
scenario_result = json.loads(f.read())
bq = big_query_utils.create_big_query()
_create_results_table(bq, dataset_id, table_id)
if not _insert_scenario_result(bq, dataset_id, table_id, scenario_result,
metadata_file, node_info_file,
prometheus_query_results_file):
print('Error uploading result to bigquery.')
sys.exit(1)
def _insert_result(bq, dataset_id, table_id, scenario_result, flatten=True):
if flatten:
_flatten_result_inplace(scenario_result)
_populate_metadata_inplace(scenario_result)
row = big_query_utils.make_row(str(uuid.uuid4()), scenario_result)
return big_query_utils.insert_rows(bq, _PROJECT_ID, dataset_id, table_id,
[row])
def _insert_scenario_result(bq,
dataset_id,
table_id,
scenario_result,
test_metadata_file,
node_info_file,
prometheus_query_results_file,
flatten=True):
if flatten:
_flatten_result_inplace(scenario_result)
_populate_metadata_from_file(scenario_result, test_metadata_file)
_populate_node_metadata_from_file(scenario_result, node_info_file)
_populate_prometheus_query_results_from_file(scenario_result,
prometheus_query_results_file)
row = big_query_utils.make_row(str(uuid.uuid4()), scenario_result)
return big_query_utils.insert_rows(bq, _PROJECT_ID, dataset_id, table_id,
[row])
def _create_results_table(bq, dataset_id, table_id):
with open(os.path.dirname(__file__) + '/scenario_result_schema.json',
'r') as f:
table_schema = json.loads(f.read())
desc = 'Results of performance benchmarks.'
return big_query_utils.create_table2(bq, _PROJECT_ID, dataset_id, table_id,
table_schema, desc)
def _flatten_result_inplace(scenario_result):
"""Bigquery is not really great for handling deeply nested data
and repeated fields. To maintain values of some fields while keeping
the schema relatively simple, we artificially leave some of the fields
as JSON strings.
"""
scenario_result['scenario']['clientConfig'] = json.dumps(
scenario_result['scenario']['clientConfig'])
scenario_result['scenario']['serverConfig'] = json.dumps(
scenario_result['scenario']['serverConfig'])
scenario_result['latencies'] = json.dumps(scenario_result['latencies'])
scenario_result['serverCpuStats'] = []
for stats in scenario_result['serverStats']:
scenario_result['serverCpuStats'].append(dict())
scenario_result['serverCpuStats'][-1]['totalCpuTime'] = stats.pop(
'totalCpuTime', None)
scenario_result['serverCpuStats'][-1]['idleCpuTime'] = stats.pop(
'idleCpuTime', None)
for stats in scenario_result['clientStats']:
stats['latencies'] = json.dumps(stats['latencies'])
stats.pop('requestResults', None)
scenario_result['serverCores'] = json.dumps(scenario_result['serverCores'])
scenario_result['clientSuccess'] = json.dumps(
scenario_result['clientSuccess'])
scenario_result['serverSuccess'] = json.dumps(
scenario_result['serverSuccess'])
scenario_result['requestResults'] = json.dumps(
scenario_result.get('requestResults', []))
scenario_result['serverCpuUsage'] = scenario_result['summary'].pop(
'serverCpuUsage', None)
scenario_result['summary'].pop('successfulRequestsPerSecond', None)
scenario_result['summary'].pop('failedRequestsPerSecond', None)
massage_qps_stats.massage_qps_stats(scenario_result)
def _populate_metadata_inplace(scenario_result):
"""Populates metadata based on environment variables set by Jenkins."""
# NOTE: Grabbing the Kokoro environment variables will only work if the
# driver is running locally on the same machine where Kokoro has started
# the job. For our setup, this is currently the case, so just assume that.
build_number = os.getenv('KOKORO_BUILD_NUMBER')
build_url = 'https://source.cloud.google.com/results/invocations/%s' % os.getenv(
'KOKORO_BUILD_ID')
job_name = os.getenv('KOKORO_JOB_NAME')
git_commit = os.getenv('KOKORO_GIT_COMMIT')
# actual commit is the actual head of PR that is getting tested
# TODO(jtattermusch): unclear how to obtain on Kokoro
git_actual_commit = os.getenv('ghprbActualCommit')
utc_timestamp = str(calendar.timegm(time.gmtime()))
metadata = {'created': utc_timestamp}
if build_number:
metadata['buildNumber'] = build_number
if build_url:
metadata['buildUrl'] = build_url
if job_name:
metadata['jobName'] = job_name
if git_commit:
metadata['gitCommit'] = git_commit
if git_actual_commit:
metadata['gitActualCommit'] = git_actual_commit
scenario_result['metadata'] = metadata
def _populate_metadata_from_file(scenario_result, test_metadata_file):
utc_timestamp = str(calendar.timegm(time.gmtime()))
metadata = {'created': utc_timestamp}
_annotation_to_bq_metadata_key_map = {
'ci_' + key: key for key in (
'buildNumber',
'buildUrl',
'jobName',
'gitCommit',
'gitActualCommit',
)
}
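# maps test metadata annotations such as "ci_buildNumber" to the
# corresponding BigQuery metadata keys ("buildNumber" etc.)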
if os.access(test_metadata_file, os.R_OK):
with open(test_metadata_file, 'r') as f:
test_metadata = json.loads(f.read())
# eliminate managedFields from metadata set
if 'managedFields' in test_metadata:
del test_metadata['managedFields']
annotations = test_metadata.get('annotations', {})
# if `kubectl apply ...` was used, kubectl appends the current configuration
# to the annotations; that field is deleted here since it contains a lot of
# irrelevant information
if 'kubectl.kubernetes.io/last-applied-configuration' in annotations:
del annotations['kubectl.kubernetes.io/last-applied-configuration']
# dump all metadata as JSON to testMetadata field
scenario_result['testMetadata'] = json.dumps(test_metadata)
for key, value in _annotation_to_bq_metadata_key_map.items():
if key in annotations:
metadata[value] = annotations[key]
scenario_result['metadata'] = metadata
def _populate_node_metadata_from_file(scenario_result, node_info_file):
node_metadata = {'driver': {}, 'servers': [], 'clients': []}
_node_info_to_bq_node_metadata_key_map = {
'Name': 'name',
'PodIP': 'podIP',
'NodeName': 'nodeName',
}
if os.access(node_info_file, os.R_OK):
with open(node_info_file, 'r') as f:
file_metadata = json.loads(f.read())
for key, value in _node_info_to_bq_node_metadata_key_map.items():
node_metadata['driver'][value] = file_metadata['Driver'][key]
for clientNodeInfo in file_metadata['Clients']:
node_metadata['clients'].append({
value: clientNodeInfo[key] for key, value in
_node_info_to_bq_node_metadata_key_map.items()
})
for serverNodeInfo in file_metadata['Servers']:
node_metadata['servers'].append({
value: serverNodeInfo[key] for key, value in
_node_info_to_bq_node_metadata_key_map.items()
})
scenario_result['nodeMetadata'] = node_metadata
def _populate_prometheus_query_results_from_file(scenario_result,
prometheus_query_result_file):
"""Populate the results from Prometheus query to Bigquery table """
if os.access(prometheus_query_result_file, os.R_OK):
with open(prometheus_query_result_file, 'r', encoding='utf8') as f:
file_query_results = json.loads(f.read())
scenario_result['testDurationSeconds'] = file_query_results[
'testDurationSeconds']
clientsPrometheusData = []
if 'clients' in file_query_results:
for client_name, client_data in file_query_results[
'clients'].items():
clientPrometheusData = {'name': client_name}
containersPrometheusData = []
for container_name, container_data in client_data.items():
containerPrometheusData = {
'name': container_name,
'cpuSeconds': container_data['cpuSeconds'],
'memoryMean': container_data['memoryMean'],
}
containersPrometheusData.append(containerPrometheusData)
clientPrometheusData[
'containers'] = containersPrometheusData
clientsPrometheusData.append(clientPrometheusData)
scenario_result['clientsPrometheusData'] = clientsPrometheusData
serversPrometheusData = []
if 'servers' in file_query_results:
for server_name, server_data in file_query_results[
'servers'].items():
serverPrometheusData = {'name': server_name}
containersPrometheusData = []
for container_name, container_data in server_data.items():
containerPrometheusData = {
'name': container_name,
'cpuSeconds': container_data['cpuSeconds'],
'memoryMean': container_data['memoryMean'],
}
containersPrometheusData.append(containerPrometheusData)
serverPrometheusData[
'containers'] = containersPrometheusData
serversPrometheusData.append(serverPrometheusData)
scenario_result['serversPrometheusData'] = serversPrometheusData
argp = argparse.ArgumentParser(description='Upload result to big query.')
argp.add_argument('--bq_result_table',
required=True,
default=None,
type=str,
help='Bigquery "dataset.table" to upload results to.')
argp.add_argument('--file_to_upload',
default='scenario_result.json',
type=str,
help='Report file to upload.')
argp.add_argument('--metadata_file_to_upload',
default='metadata.json',
type=str,
help='Metadata file to upload.')
argp.add_argument('--node_info_file_to_upload',
default='node_info.json',
type=str,
help='Node information file to upload.')
argp.add_argument('--prometheus_query_results_to_upload',
default='prometheus_query_result.json',
type=str,
help='Prometheus query result file to upload.')
argp.add_argument('--file_format',
choices=['scenario_result', 'netperf_latency_csv'],
default='scenario_result',
help='Format of the file to upload.')
args = argp.parse_args()
dataset_id, table_id = args.bq_result_table.split('.', 2)
if args.file_format == 'netperf_latency_csv':
_upload_netperf_latency_csv_to_bigquery(dataset_id, table_id,
args.file_to_upload)
else:
_upload_scenario_result_to_bigquery(dataset_id, table_id,
args.file_to_upload,
args.metadata_file_to_upload,
args.node_info_file_to_upload,
args.prometheus_query_results_to_upload)
print('Successfully uploaded %s, %s, %s and %s to BigQuery.\n' %
(args.file_to_upload, args.metadata_file_to_upload,
args.node_info_file_to_upload, args.prometheus_query_results_to_upload))
| {
"content_hash": "07b0bba30f029522f7b72da404171ade",
"timestamp": "",
"source": "github",
"line_count": 322,
"max_line_length": 85,
"avg_line_length": 42.642857142857146,
"alnum_prop": 0.5896147403685092,
"repo_name": "ctiller/grpc",
"id": "b449e10505abe598c87159cbffd845b397293297",
"size": "14390",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tools/run_tests/performance/bq_upload_result.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "5444"
},
{
"name": "Batchfile",
"bytes": "38831"
},
{
"name": "C",
"bytes": "1377708"
},
{
"name": "C#",
"bytes": "106367"
},
{
"name": "C++",
"bytes": "16352561"
},
{
"name": "CMake",
"bytes": "29311"
},
{
"name": "CSS",
"bytes": "1519"
},
{
"name": "Cython",
"bytes": "258768"
},
{
"name": "DTrace",
"bytes": "147"
},
{
"name": "Dockerfile",
"bytes": "179860"
},
{
"name": "Go",
"bytes": "34794"
},
{
"name": "HTML",
"bytes": "14"
},
{
"name": "Java",
"bytes": "13923"
},
{
"name": "JavaScript",
"bytes": "5572"
},
{
"name": "Objective-C",
"bytes": "724357"
},
{
"name": "Objective-C++",
"bytes": "79351"
},
{
"name": "PHP",
"bytes": "486781"
},
{
"name": "PowerShell",
"bytes": "4516"
},
{
"name": "Python",
"bytes": "3814801"
},
{
"name": "Ruby",
"bytes": "650063"
},
{
"name": "Shell",
"bytes": "766652"
},
{
"name": "Starlark",
"bytes": "805915"
},
{
"name": "Swift",
"bytes": "7487"
},
{
"name": "XSLT",
"bytes": "9846"
}
],
"symlink_target": ""
} |
import json
import unittest
from unittest import mock
from azure.batch import BatchServiceClient, models as batch_models
from airflow.models import Connection
from airflow.providers.microsoft.azure.hooks.azure_batch import AzureBatchHook
from airflow.utils import db
class TestAzureBatchHook(unittest.TestCase):
# set up the test environment
def setUp(self):
# set up the test variable
self.test_vm_conn_id = "test_azure_batch_vm"
self.test_cloud_conn_id = "test_azure_batch_cloud"
self.test_account_name = "test_account_name"
self.test_account_key = "test_account_key"
self.test_account_url = "http://test-endpoint:29000"
self.test_vm_size = "test-vm-size"
self.test_vm_publisher = "test.vm.publisher"
self.test_vm_offer = "test.vm.offer"
self.test_vm_sku = "test-sku"
self.test_cloud_os_family = "test-family"
self.test_cloud_os_version = "test-version"
self.test_node_agent_sku = "test-node-agent-sku"
# connect with vm configuration
db.merge_conn(
Connection(
conn_id=self.test_vm_conn_id,
conn_type="azure_batch",
extra=json.dumps(
{
"account_name": self.test_account_name,
"account_key": self.test_account_key,
"account_url": self.test_account_url,
"vm_publisher": self.test_vm_publisher,
"vm_offer": self.test_vm_offer,
"vm_sku": self.test_vm_sku,
"node_agent_sku_id": self.test_node_agent_sku,
}
),
)
)
# connect with cloud service
db.merge_conn(
Connection(
conn_id=self.test_cloud_conn_id,
conn_type="azure_batch",
extra=json.dumps(
{
"account_name": self.test_account_name,
"account_key": self.test_account_key,
"account_url": self.test_account_url,
"os_family": self.test_cloud_os_family,
"os_version": self.test_cloud_os_version,
"node_agent_sku_id": self.test_node_agent_sku,
}
),
)
)
def test_connection_and_client(self):
hook = AzureBatchHook(azure_batch_conn_id=self.test_vm_conn_id)
assert isinstance(hook._connection(), Connection)
assert isinstance(hook.get_conn(), BatchServiceClient)
def test_configure_pool_with_vm_config(self):
hook = AzureBatchHook(azure_batch_conn_id=self.test_vm_conn_id)
pool = hook.configure_pool(
pool_id='mypool',
vm_size="test_vm_size",
target_dedicated_nodes=1,
vm_publisher="test.vm.publisher",
vm_offer="test.vm.offer",
sku_starts_with="test-sku",
)
assert isinstance(pool, batch_models.PoolAddParameter)
def test_configure_pool_with_cloud_config(self):
hook = AzureBatchHook(azure_batch_conn_id=self.test_cloud_conn_id)
pool = hook.configure_pool(
pool_id='mypool',
vm_size="test_vm_size",
target_dedicated_nodes=1,
vm_publisher="test.vm.publisher",
vm_offer="test.vm.offer",
sku_starts_with="test-sku",
)
assert isinstance(pool, batch_models.PoolAddParameter)
def test_configure_pool_with_latest_vm(self):
with mock.patch(
"airflow.providers.microsoft.azure.hooks."
"azure_batch.AzureBatchHook._get_latest_verified_image_vm_and_sku"
) as mock_getvm:
hook = AzureBatchHook(azure_batch_conn_id=self.test_cloud_conn_id)
getvm_instance = mock_getvm
getvm_instance.return_value = ['test-image', 'test-sku']
pool = hook.configure_pool(
pool_id='mypool',
vm_size="test_vm_size",
use_latest_image_and_sku=True,
vm_publisher="test.vm.publisher",
vm_offer="test.vm.offer",
sku_starts_with="test-sku",
)
assert isinstance(pool, batch_models.PoolAddParameter)
@mock.patch("airflow.providers.microsoft.azure.hooks.azure_batch.BatchServiceClient")
def test_create_pool_with_vm_config(self, mock_batch):
hook = AzureBatchHook(azure_batch_conn_id=self.test_vm_conn_id)
mock_instance = mock_batch.return_value.pool.add
pool = hook.configure_pool(
pool_id='mypool',
vm_size="test_vm_size",
target_dedicated_nodes=1,
vm_publisher="test.vm.publisher",
vm_offer="test.vm.offer",
sku_starts_with="test-sku",
)
hook.create_pool(pool=pool)
mock_instance.assert_called_once_with(pool)
@mock.patch("airflow.providers.microsoft.azure.hooks.azure_batch.BatchServiceClient")
def test_create_pool_with_cloud_config(self, mock_batch):
hook = AzureBatchHook(azure_batch_conn_id=self.test_cloud_conn_id)
mock_instance = mock_batch.return_value.pool.add
pool = hook.configure_pool(
pool_id='mypool',
vm_size="test_vm_size",
target_dedicated_nodes=1,
vm_publisher="test.vm.publisher",
vm_offer="test.vm.offer",
sku_starts_with="test-sku",
)
hook.create_pool(pool=pool)
mock_instance.assert_called_once_with(pool)
@mock.patch("airflow.providers.microsoft.azure.hooks.azure_batch.BatchServiceClient")
def test_wait_for_all_nodes(self, mock_batch):
# TODO: Add test
pass
@mock.patch("airflow.providers.microsoft.azure.hooks.azure_batch.BatchServiceClient")
def test_job_configuration_and_create_job(self, mock_batch):
hook = AzureBatchHook(azure_batch_conn_id=self.test_vm_conn_id)
mock_instance = mock_batch.return_value.job.add
job = hook.configure_job(job_id='myjob', pool_id='mypool')
hook.create_job(job)
assert isinstance(job, batch_models.JobAddParameter)
mock_instance.assert_called_once_with(job)
@mock.patch('airflow.providers.microsoft.azure.hooks.azure_batch.BatchServiceClient')
def test_add_single_task_to_job(self, mock_batch):
hook = AzureBatchHook(azure_batch_conn_id=self.test_vm_conn_id)
mock_instance = mock_batch.return_value.task.add
task = hook.configure_task(task_id="mytask", command_line="echo hello")
hook.add_single_task_to_job(job_id='myjob', task=task)
assert isinstance(task, batch_models.TaskAddParameter)
mock_instance.assert_called_once_with(job_id="myjob", task=task)
@mock.patch('airflow.providers.microsoft.azure.hooks.azure_batch.BatchServiceClient')
def test_wait_for_all_task_to_complete(self, mock_batch):
# TODO: Add test
pass
| {
"content_hash": "dc6aaa85d1197f94ab373dc8e153b70d",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 89,
"avg_line_length": 42.26190476190476,
"alnum_prop": 0.5929577464788732,
"repo_name": "dhuang/incubator-airflow",
"id": "4361170b18fda4ce345defac12b9592b2dd0ab92",
"size": "7889",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tests/providers/microsoft/azure/hooks/test_azure_batch.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "109698"
},
{
"name": "HTML",
"bytes": "264851"
},
{
"name": "JavaScript",
"bytes": "1988427"
},
{
"name": "Mako",
"bytes": "1037"
},
{
"name": "Python",
"bytes": "3357958"
},
{
"name": "Shell",
"bytes": "34442"
}
],
"symlink_target": ""
} |
"""Build data lineage build.
Example:
python3 ./deploy_data_lineage.py -p <GCP_ENVIRONMENT>
"""
import argparse
import sys
import datetime
from deployment_utils import BQUtils
from load_data_lineage import LoadDomainLevelDataLineage
from load_data_lineage import LoadProjectLevelDataLineage
from load_data_lineage import LoadQueryLevelDataLineage
from load_data_lineage import LoadDataLineageBuild
from sql_graph import GrizzlyLoader
def main(args: argparse.Namespace):
"""Implement the command line interface described in the module doc string."""
bq_utils = BQUtils(gcp_project_id=args.gcp_project_id)
LoadDataLineageBuild.create_dl_build_log_table(bq_utils)
results = bq_utils.bq_client.query(
query="select * from etl_log.vw_build_data_lineage_queq").result()
for row in results:
print(f"Build_id={row.build_id}: build_datetime={row.build_datetime}")
print(f"Started data loading at {datetime.datetime.now()}")
loader = GrizzlyLoader(
gcp_project=args.gcp_project_id,
datetime=row.dt_build_datetime,
)
print(f"Finished data loading at {datetime.datetime.now()}")
print("Calculating Query-Level Data Lineage")
LoadQueryLevelDataLineage(
bq_utils=bq_utils,
loader=loader,
build_id=row.build_id,
build_datetime=row.dt_build_datetime,
).load_data()
print("Calculating Domain-Level Data Lineage")
LoadDomainLevelDataLineage(
bq_utils=bq_utils,
loader=loader,
build_id=row.build_id,
build_datetime=row.dt_build_datetime,
).load_data()
print("Calculating Project-Level Data Lineage")
LoadProjectLevelDataLineage(
bq_utils=bq_utils,
loader=loader,
build_id=row.build_id,
build_datetime=row.dt_build_datetime,
).load_data()
print("Writing Data Lineage build info to BQ")
LoadDataLineageBuild(
bq_utils=bq_utils,
build_id=row.build_id
).load_data()
print("=" * 20 + "\n")
if __name__ == "__main__":
try:
# Construct the argument parser
ap = argparse.ArgumentParser(
description="Script used for "
"Deploy data lineage."
)
# Add the arguments to the parser
ap.add_argument(
"-p",
"--project",
dest="gcp_project_id",
required=True,
help="Target GCP project")
arguments = ap.parse_args()
main(args=arguments)
except:
print("Unexpected error:", sys.exc_info()[1])
raise
| {
"content_hash": "b6b1eb13b40d4de4bb6c3d8ae3697c83",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 80,
"avg_line_length": 27.46153846153846,
"alnum_prop": 0.6682673069227691,
"repo_name": "google/grizzly",
"id": "b5f3c0eb54fbae555a1bd3a2c390cac06e119ac3",
"size": "3075",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "scripts/deploy_data_lineage.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1075"
},
{
"name": "Dockerfile",
"bytes": "1294"
},
{
"name": "HCL",
"bytes": "107097"
},
{
"name": "HTML",
"bytes": "1152"
},
{
"name": "JavaScript",
"bytes": "52626"
},
{
"name": "Jinja",
"bytes": "8031"
},
{
"name": "Python",
"bytes": "569193"
},
{
"name": "Shell",
"bytes": "13761"
}
],
"symlink_target": ""
} |
"""Factory method for easily getting imdbs by name."""
__sets = {}
from datasets.pascal_voc import pascal_voc
from datasets.coco import coco
import numpy as np
# Set up voc_<year>_<split> using selective search "fast" mode
for year in ['2007', '2012', '0712']:
for split in ['train', 'val', 'trainval', 'test']:
name = 'voc_{}_{}'.format(year, split)
__sets[name] = (lambda split=split, year=year: pascal_voc(split, year))
# Set up coco_2014_<split>
for year in ['2014']:
for split in ['train', 'val', 'minival', 'valminusminival']:
name = 'coco_{}_{}'.format(year, split)
__sets[name] = (lambda split=split, year=year: coco(split, year))
# Set up coco_2015_<split>
for year in ['2015']:
for split in ['test', 'test-dev']:
name = 'coco_{}_{}'.format(year, split)
__sets[name] = (lambda split=split, year=year: coco(split, year))
def get_imdb(name):
"""Get an imdb (image database) by name."""
if not __sets.has_key(name):
raise KeyError('Unknown dataset: {}'.format(name))
return __sets[name]()
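# e.g. get_imdb('voc_2007_trainval') constructs pascal_voc('trainval', '2007')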
def list_imdbs():
"""List all registered imdbs."""
return __sets.keys()
| {
"content_hash": "fbbf6531e11e2b0a4603b80ae6ffb093",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 79,
"avg_line_length": 32.30555555555556,
"alnum_prop": 0.6087704213241617,
"repo_name": "Orpine/py-R-FCN",
"id": "339ea13c8ecd5542be6e2ea3a27cc0b4a4ae9009",
"size": "1412",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/datasets/factory.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9632"
},
{
"name": "C++",
"bytes": "146"
},
{
"name": "Cuda",
"bytes": "5064"
},
{
"name": "Makefile",
"bytes": "56"
},
{
"name": "Matlab",
"bytes": "1821"
},
{
"name": "Python",
"bytes": "212741"
},
{
"name": "Shell",
"bytes": "12404"
}
],
"symlink_target": ""
} |
"""
Generic views that provide commonly needed behaviour.
"""
from rest_framework import views, mixins
from rest_framework.settings import api_settings
from django.views.generic.detail import SingleObjectMixin
from django.views.generic.list import MultipleObjectMixin
### Base classes for the generic views ###
class GenericAPIView(views.APIView):
"""
Base class for all other generic views.
"""
model = None
serializer_class = None
model_serializer_class = api_settings.DEFAULT_MODEL_SERIALIZER_CLASS
def get_serializer_context(self):
"""
Extra context provided to the serializer class.
"""
return {
'request': self.request,
'format': self.format_kwarg,
'view': self
}
def get_serializer_class(self):
"""
Return the class to use for the serializer.
Defaults to using `self.serializer_class`, falls back to constructing a
model serializer class using `self.model_serializer_class`, with
`self.model` as the model.
"""
serializer_class = self.serializer_class
if serializer_class is None:
class DefaultSerializer(self.model_serializer_class):
class Meta:
model = self.model
serializer_class = DefaultSerializer
return serializer_class
def get_serializer(self, instance=None, data=None, files=None):
"""
Return the serializer instance that should be used for validating and
deserializing input, and for serializing output.
"""
serializer_class = self.get_serializer_class()
context = self.get_serializer_context()
return serializer_class(instance, data=data, files=files, context=context)
class MultipleObjectAPIView(MultipleObjectMixin, GenericAPIView):
"""
Base class for generic views onto a queryset.
"""
paginate_by = api_settings.PAGINATE_BY
paginate_by_param = api_settings.PAGINATE_BY_PARAM
pagination_serializer_class = api_settings.DEFAULT_PAGINATION_SERIALIZER_CLASS
filter_backend = api_settings.FILTER_BACKEND
def filter_queryset(self, queryset):
"""
Given a queryset, filter it with whichever filter backend is in use.
"""
if not self.filter_backend:
return queryset
backend = self.filter_backend()
return backend.filter_queryset(self.request, queryset, self)
def get_pagination_serializer(self, page=None):
"""
Return a serializer instance to use with paginated data.
"""
class SerializerClass(self.pagination_serializer_class):
class Meta:
object_serializer_class = self.get_serializer_class()
pagination_serializer_class = SerializerClass
context = self.get_serializer_context()
return pagination_serializer_class(instance=page, context=context)
def get_paginate_by(self, queryset):
"""
Return the size of pages to use with pagination.
"""
if self.paginate_by_param:
query_params = self.request.QUERY_PARAMS
try:
return int(query_params[self.paginate_by_param])
except (KeyError, ValueError):
pass
return self.paginate_by
class SingleObjectAPIView(SingleObjectMixin, GenericAPIView):
"""
Base class for generic views onto a model instance.
"""
pk_url_kwarg = 'pk' # Not provided in Django 1.3
slug_url_kwarg = 'slug' # Not provided in Django 1.3
slug_field = 'slug'
def get_object(self, queryset=None):
"""
Override default to add support for object-level permissions.
"""
obj = super(SingleObjectAPIView, self).get_object(queryset)
if not self.has_permission(self.request, obj):
self.permission_denied(self.request)
return obj
### Concrete view classes that provide method handlers ###
### by composing the mixin classes with a base view. ###
class CreateAPIView(mixins.CreateModelMixin,
GenericAPIView):
"""
Concrete view for creating a model instance.
"""
def post(self, request, *args, **kwargs):
return self.create(request, *args, **kwargs)
class ListAPIView(mixins.ListModelMixin,
MultipleObjectAPIView):
"""
Concrete view for listing a queryset.
"""
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
class RetrieveAPIView(mixins.RetrieveModelMixin,
SingleObjectAPIView):
"""
Concrete view for retrieving a model instance.
"""
def get(self, request, *args, **kwargs):
return self.retrieve(request, *args, **kwargs)
class DestroyAPIView(mixins.DestroyModelMixin,
SingleObjectAPIView):
"""
Concrete view for deleting a model instance.
"""
def delete(self, request, *args, **kwargs):
return self.destroy(request, *args, **kwargs)
class UpdateAPIView(mixins.UpdateModelMixin,
SingleObjectAPIView):
"""
Concrete view for updating a model instance.
"""
def put(self, request, *args, **kwargs):
return self.update(request, *args, **kwargs)
class ListCreateAPIView(mixins.ListModelMixin,
mixins.CreateModelMixin,
MultipleObjectAPIView):
"""
Concrete view for listing a queryset or creating a model instance.
"""
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
return self.create(request, *args, **kwargs)
class RetrieveDestroyAPIView(mixins.RetrieveModelMixin,
mixins.DestroyModelMixin,
SingleObjectAPIView):
"""
Concrete view for retrieving or deleting a model instance.
"""
def get(self, request, *args, **kwargs):
return self.retrieve(request, *args, **kwargs)
def delete(self, request, *args, **kwargs):
return self.destroy(request, *args, **kwargs)
class RetrieveUpdateDestroyAPIView(mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
SingleObjectAPIView):
"""
Concrete view for retrieving, updating or deleting a model instance.
"""
def get(self, request, *args, **kwargs):
return self.retrieve(request, *args, **kwargs)
def put(self, request, *args, **kwargs):
return self.update(request, *args, **kwargs)
def delete(self, request, *args, **kwargs):
return self.destroy(request, *args, **kwargs)
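# A minimal usage sketch (hypothetical model and serializer names):
#
# class BookListCreate(ListCreateAPIView):
#     model = Book
#     serializer_class = BookSerializer
#
# which wires the ListModelMixin/CreateModelMixin handlers above to GET and POST.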
| {
"content_hash": "a9370491e09da2051fb1098de03b8077",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 82,
"avg_line_length": 31.804651162790698,
"alnum_prop": 0.6286926001754899,
"repo_name": "cloudcopy/seahub",
"id": "dd8dfcf8da1c6ac175145919eab8c73d26ec0ead",
"size": "6838",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "thirdpart/rest_framework/generics.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "231001"
},
{
"name": "HTML",
"bytes": "756152"
},
{
"name": "JavaScript",
"bytes": "2430927"
},
{
"name": "PLSQL",
"bytes": "16796"
},
{
"name": "Python",
"bytes": "1508638"
},
{
"name": "Shell",
"bytes": "9365"
}
],
"symlink_target": ""
} |
"""
pygments.sphinxext
~~~~~~~~~~~~~~~~~~
Sphinx extension to generate automatic documentation of lexers,
formatters and filters.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
import sys
from docutils import nodes
from docutils.statemachine import ViewList
from sphinx.util.compat import Directive
from sphinx.util.nodes import nested_parse_with_titles
MODULEDOC = '''
.. module:: %s
%s
%s
'''
LEXERDOC = '''
.. class:: %s
:Short names: %s
:Filenames: %s
:MIME types: %s
%s
'''
FMTERDOC = '''
.. class:: %s
:Short names: %s
:Filenames: %s
%s
'''
FILTERDOC = '''
.. class:: %s
:Name: %s
%s
'''
class PygmentsDoc(Directive):
"""
A directive to collect all lexers/formatters/filters and generate
autoclass directives for them.
"""
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = False
option_spec = {}
def run(self):
self.filenames = set()
if self.arguments[0] == 'lexers':
out = self.document_lexers()
elif self.arguments[0] == 'formatters':
out = self.document_formatters()
elif self.arguments[0] == 'filters':
out = self.document_filters()
else:
raise Exception('invalid argument for "pygmentsdoc" directive')
node = nodes.compound()
vl = ViewList(out.split('\n'), source='')
nested_parse_with_titles(self.state, vl, node)
for fn in self.filenames:
self.state.document.settings.record_dependencies.add(fn)
return node.children
def document_lexers(self):
from pygments.lexers._mapping import LEXERS
out = []
modules = {}
moduledocstrings = {}
for classname, data in sorted(LEXERS.items(), key=lambda x: x[0]):
module = data[0]
mod = __import__(module, None, None, [classname])
self.filenames.add(mod.__file__)
cls = getattr(mod, classname)
if not cls.__doc__:
print("Warning: %s does not have a docstring." % classname)
docstring = cls.__doc__
if isinstance(docstring, bytes):
docstring = docstring.decode('utf8')
modules.setdefault(module, []).append((
classname,
', '.join(data[2]) or 'None',
', '.join(data[3]).replace('*', '\\*').replace('_', '\\_') or 'None',
', '.join(data[4]) or 'None',
docstring))
if module not in moduledocstrings:
moddoc = mod.__doc__
if isinstance(moddoc, bytes):
moddoc = moddoc.decode('utf8')
moduledocstrings[module] = moddoc
for module, lexers in sorted(modules.items(), key=lambda x: x[0]):
if moduledocstrings[module] is None:
raise Exception("Missing docstring for %s" % (module,))
heading = moduledocstrings[module].splitlines()[4].strip().rstrip('.')
out.append(MODULEDOC % (module, heading, '-'*len(heading)))
for data in lexers:
out.append(LEXERDOC % data)
return ''.join(out)
def document_formatters(self):
from pygments.formatters import FORMATTERS
out = []
for classname, data in sorted(FORMATTERS.items(), key=lambda x: x[0]):
module = data[0]
mod = __import__(module, None, None, [classname])
self.filenames.add(mod.__file__)
cls = getattr(mod, classname)
docstring = cls.__doc__
if isinstance(docstring, bytes):
docstring = docstring.decode('utf8')
heading = cls.__name__
out.append(FMTERDOC % (heading, ', '.join(data[2]) or 'None',
', '.join(data[3]).replace('*', '\\*') or 'None',
docstring))
return ''.join(out)
def document_filters(self):
from pygments.filters import FILTERS
out = []
for name, cls in FILTERS.items():
self.filenames.add(sys.modules[cls.__module__].__file__)
docstring = cls.__doc__
if isinstance(docstring, bytes):
docstring = docstring.decode('utf8')
out.append(FILTERDOC % (cls.__name__, name, docstring))
return ''.join(out)
def setup(app):
app.add_directive('pygmentsdoc', PygmentsDoc)
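# The directive is then used from reST source as, for example:
#   .. pygmentsdoc:: lexers
# where the single argument is one of "lexers", "formatters" or "filters"
# (see run() above).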
| {
"content_hash": "455295baa66dbcbb822c4fe730c23b46",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 84,
"avg_line_length": 29.67948717948718,
"alnum_prop": 0.5485961123110151,
"repo_name": "amaozhao/blogular",
"id": "2dc9810fb370e547c34c9dc519062785ee9d5266",
"size": "4654",
"binary": false,
"copies": "23",
"ref": "refs/heads/master",
"path": "pygments/sphinxext.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "630768"
},
{
"name": "HTML",
"bytes": "152862"
},
{
"name": "JavaScript",
"bytes": "855396"
},
{
"name": "Python",
"bytes": "4112262"
}
],
"symlink_target": ""
} |
import six
from cassandra import cluster
from cassandra.cluster import ContinuousPagingOptions
from cassandra.datastax.graph.fluent import DseGraph
from cassandra.graph import VertexProperty
from tests.integration import greaterthanorequaldse68
from tests.integration.advanced.graph import (
GraphUnitTestCase, ClassicGraphSchema, CoreGraphSchema,
VertexLabel, GraphTestConfiguration
)
from tests.integration import greaterthanorequaldse60
from tests.integration.advanced.graph.fluent import (
BaseExplicitExecutionTest, create_traversal_profiles, check_equality_base)
import unittest
@greaterthanorequaldse60
@GraphTestConfiguration.generate_tests(traversal=True)
class BatchStatementTests(BaseExplicitExecutionTest):
def setUp(self):
super(BatchStatementTests, self).setUp()
self.ep_graphson2, self.ep_graphson3 = create_traversal_profiles(self.cluster, self.graph_name)
def _test_batch_with_schema(self, schema, graphson):
"""
Sends a Batch statement and verifies it has succeeded with a schema created
@since 1.1.0
@jira_ticket PYTHON-789
@expected_result the batch statement executes successfully
@test_category dse graph
"""
self._send_batch_and_read_results(schema, graphson)
def _test_batch_without_schema(self, schema, graphson):
"""
Sends a Batch statement and verifies it has succeeded without a schema created
@since 1.1.0
@jira_ticket PYTHON-789
@expected_result the batch statement executes successfully
@test_category dse graph
"""
if schema is not ClassicGraphSchema:
raise unittest.SkipTest('schema-less is only for classic graphs')
self._send_batch_and_read_results(schema, graphson, use_schema=False)
def _test_batch_with_schema_add_all(self, schema, graphson):
"""
Sends a Batch statement and verifies it has succeeded with a schema created.
Uses :method:`dse_graph.query._BatchGraphStatement.add_all` to add the statements
instead of :method:`dse_graph.query._BatchGraphStatement.add`
@since 1.1.0
@jira_ticket PYTHON-789
@expected_result the batch statement executes successfully
@test_category dse graph
"""
self._send_batch_and_read_results(schema, graphson, add_all=True)
def _test_batch_without_schema_add_all(self, schema, graphson):
"""
Sends a Batch statement and verifies it has succeeded without a schema created
Uses :method:`dse_graph.query._BatchGraphStatement.add_all` to add the statements
instead of :method:`dse_graph.query._BatchGraphStatement.add`
@since 1.1.0
@jira_ticket PYTHON-789
@expected_result the batch statement executes successfully
@test_category dse graph
"""
if schema is not ClassicGraphSchema:
raise unittest.SkipTest('schema-less is only for classic graphs')
self._send_batch_and_read_results(schema, graphson, add_all=True, use_schema=False)
def test_only_graph_traversals_are_accepted(self):
"""
Verifies that ValueError is raised if the parameter passed to add is not a traversal
@since 1.1.0
@jira_ticket PYTHON-789
@expected_result ValueError is raised
@test_category dse graph
"""
batch = DseGraph.batch()
self.assertRaises(ValueError, batch.add, '{"@value":{"step":[["addV","poc_int"],'
'["property","bigint1value",{"@value":12,"@type":"g:Int32"}]]},'
'"@type":"g:Bytecode"}')
another_batch = DseGraph.batch()
self.assertRaises(ValueError, batch.add, another_batch)
def _send_batch_and_read_results(self, schema, graphson, add_all=False, use_schema=True):
traversals = []
datatypes = schema.fixtures.datatypes()
values = {}
g = self.fetch_traversal_source(graphson)
ep = self.get_execution_profile(graphson)
batch = DseGraph.batch(session=self.session,
execution_profile=self.get_execution_profile(graphson, traversal=True))
for data in six.itervalues(datatypes):
typ, value, deserializer = data
vertex_label = VertexLabel([typ])
property_name = next(six.iterkeys(vertex_label.non_pk_properties))
values[property_name] = value
if use_schema or schema is CoreGraphSchema:
schema.create_vertex_label(self.session, vertex_label, execution_profile=ep)
traversal = g.addV(str(vertex_label.label)).property('pkid', vertex_label.id).property(property_name, value)
if not add_all:
batch.add(traversal)
traversals.append(traversal)
if add_all:
batch.add_all(traversals)
self.assertEqual(len(datatypes), len(batch))
batch.execute()
vertices = self.execute_traversal(g.V(), graphson)
self.assertEqual(len(vertices), len(datatypes), "g.V() returned {}".format(vertices))
# Iterate over all the vertices and check that they match the original input
for vertex in vertices:
schema.ensure_properties(self.session, vertex, execution_profile=ep)
key = [k for k in list(vertex.properties.keys()) if k != 'pkid'][0].replace("value", "")
original = values[key]
self._check_equality(original, vertex)
def _check_equality(self, original, vertex):
for key in vertex.properties:
if key == 'pkid':
continue
value = vertex.properties[key].value \
if isinstance(vertex.properties[key], VertexProperty) else vertex.properties[key][0].value
check_equality_base(self, original, value)
class ContinuousPagingOptionsForTests(ContinuousPagingOptions):
def __init__(self,
page_unit=ContinuousPagingOptions.PagingUnit.ROWS, max_pages=1, # max_pages=1
max_pages_per_second=0, max_queue_size=4):
super(ContinuousPagingOptionsForTests, self).__init__(page_unit, max_pages, max_pages_per_second,
max_queue_size)
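# forcing max_pages=1 makes every continuously paged query return a single
# page, so with default_fetch_size = 10 the tests below see exactly 10 rows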
def reset_paging_options():
cluster.ContinuousPagingOptions = ContinuousPagingOptions
@greaterthanorequaldse68
@GraphTestConfiguration.generate_tests(schema=CoreGraphSchema)
class GraphPagingTest(GraphUnitTestCase):
def setUp(self):
super(GraphPagingTest, self).setUp()
self.addCleanup(reset_paging_options)
self.ep_graphson2, self.ep_graphson3 = create_traversal_profiles(self.cluster, self.graph_name)
def _setup_data(self, schema, graphson):
self.execute_graph(
"schema.vertexLabel('person').ifNotExists().partitionBy('name', Text).property('age', Int).create();",
graphson)
for i in range(100):
self.execute_graph("g.addV('person').property('name', 'batman-{}')".format(i), graphson)
def _test_cont_paging_is_enabled_by_default(self, schema, graphson):
"""
Test that graph paging is automatically enabled with a >=6.8 cluster.
@jira_ticket PYTHON-1045
@expected_result the default continuous paging options are used
@test_category dse graph
"""
# with traversals... I don't have access to the response future... so this is a hack to ensure paging is on
cluster.ContinuousPagingOptions = ContinuousPagingOptionsForTests
ep = self.get_execution_profile(graphson, traversal=True)
self._setup_data(schema, graphson)
self.session.default_fetch_size = 10
g = DseGraph.traversal_source(self.session, execution_profile=ep)
results = g.V().toList()
self.assertEqual(len(results), 10) # only 10 results due to our hack
def _test_cont_paging_can_be_disabled(self, schema, graphson):
"""
Test that graph paging can be disabled.
@jira_ticket PYTHON-1045
@expected_result the default continuous paging options are not used
@test_category dse graph
"""
# with traversals... I don't have access to the response future... so this is a hack to ensure paging is on
cluster.ContinuousPagingOptions = ContinuousPagingOptionsForTests
ep = self.get_execution_profile(graphson, traversal=True)
ep = self.session.execution_profile_clone_update(ep, continuous_paging_options=None)
self._setup_data(schema, graphson)
self.session.default_fetch_size = 10
g = DseGraph.traversal_source(self.session, execution_profile=ep)
results = g.V().toList()
self.assertEqual(len(results), 100) # 100 results since paging is disabled
def _test_cont_paging_with_custom_options(self, schema, graphson):
"""
Test that we can specify custom paging options.
@jira_ticket PYTHON-1045
@expected_result we get only the desired number of results
@test_category dse graph
"""
ep = self.get_execution_profile(graphson, traversal=True)
ep = self.session.execution_profile_clone_update(ep,
continuous_paging_options=ContinuousPagingOptions(max_pages=1))
self._setup_data(schema, graphson)
self.session.default_fetch_size = 10
g = DseGraph.traversal_source(self.session, execution_profile=ep)
results = g.V().toList()
self.assertEqual(len(results), 10) # only 10 results since paging is disabled
| {
"content_hash": "484dd6235d41b4290d28931e8a5aab52",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 120,
"avg_line_length": 42.004366812227076,
"alnum_prop": 0.651626988252417,
"repo_name": "datastax/python-driver",
"id": "d46a74a146c2fa0d70076c0550257d2b0c8989bf",
"size": "10189",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/integration/advanced/graph/fluent/test_graph.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "28914"
},
{
"name": "Cython",
"bytes": "51225"
},
{
"name": "Groovy",
"bytes": "41012"
},
{
"name": "PowerShell",
"bytes": "5631"
},
{
"name": "Python",
"bytes": "3219458"
}
],
"symlink_target": ""
} |
import notifications.settings as app_settings
available_backends = ["database",]
def get_format(action):
"""
find the suitable display format for the given action instance
"""
if action.target:
return app_settings.NOTIFICATION_FORMATS["basic"]
return app_settings.NOTIFICATION_FORMATS["no_target"]
def send_notification_from_action(action, recepients):
from models import Notification
for recepient in recepients:
Notification.objects.create()
def _get_backend():
pass
| {
"content_hash": "b5465759e0112d448992bbc6f785a495",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 70,
"avg_line_length": 16.9375,
"alnum_prop": 0.6900369003690037,
"repo_name": "luterien/django-action-notifications",
"id": "41e46e4096a55136315658e6ed75c5bc6fedc790",
"size": "543",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "notifications/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12383"
}
],
"symlink_target": ""
} |
import types
import tempfile
import pygsl
import pygsl._numobj as nummodule
from pygsl import vector, ArrayType
from pygsl import matrix_pierre
matrix = matrix_pierre
from pygsl import _block, get_typecode
from array_check import myord, myorda, array_check
import unittest
import sys
sys.stderr = sys.stdout
#pygsl.set_debug_level(10)
def getopentmpfile(mode='rb'):
file = tempfile.TemporaryFile(mode)
assert(type(file.file) == types.FileType)
return file.file
class _DefaultTestCase(unittest.TestCase):
_type = ''
_base = None
_reference_value = 137
#_retrieve = None
def setUp(self):
#print "Testing class ", self.__class__.__name__
sys.stdout.flush()
sys.stderr.flush()
self._mysetUp()
def _get_reference_value(self):
return self._reference_value
def _get_format(self):
return self._format
def _get_function_direct(self, suffix=None):
"""
translate the suffix into the fully qualified name of the block function
"""
if suffix == None:
suffix = self.function
if self._type == '':
tmp = '_'
else:
tmp = '_' + self._type + '_'
# base is matrix or vector or .....
assert self._base != None, 'Use a derived class!'
base = self._base
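# e.g. for the float matrix case with suffix 'set_identity' this evaluates
# to _block.gsl_matrix_float_set_identity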
function = eval('_block.gsl_' + base + tmp + suffix)
return function
def _get_function_ui(self, suffix=None):
"""
Get the method to the underlying function from the UI.
"""
if suffix == None:
suffix = self.function
if self._type == '': tmp = '.'
else:
tmp = '.' + self._type + '.'
# base is matrix or vector or .....
assert self._base != None, 'Use a derived class!'
base = self._base
function = eval(base + tmp + suffix)
return function
def _get_function(self, suffix=None):
if self._retrieve == 'direct':
return self._get_function_direct(suffix)
elif self._retrieve == 'UI':
return self._get_function_ui(suffix)
else:
tmp = str(self._retrieve)
raise ValueError, "Unknown switch for _retrieve: " + tmp
def test_0_matrixtype(self):
test = 0
try:
assert type(self.array) == ArrayType, "Not an array type"
test = 1
finally:
if test == 0:
print "Expected a type of %s but got a type of %s" %(ArrayType, type(self.array))
def tearDown(self):
self._mytearDown()
class _DirectAccess:
_retrieve = 'direct'
class _UIAccess:
_retrieve = 'UI'
class _DefaultMatrixTestCase(_DefaultTestCase):
_base = 'matrix'
class _DefaultVectorTestCase(_DefaultTestCase):
_base = 'vector'
class _DoubleMatrixTestCase(_DefaultMatrixTestCase):
_type = ''
_format = '%f'
class _FloatMatrixTestCase(_DefaultMatrixTestCase):
_type = 'float'
_format = '%f'
class _ComplexMatrixTestCase(_DefaultMatrixTestCase):
_type = 'complex'
_format = '%f'
class _ComplexFloatMatrixTestCase(_DefaultMatrixTestCase):
_type = 'complex_float'
_format = '%f'
class _LongMatrixTestCase(_DefaultMatrixTestCase):
_type = 'long'
_format = '%ld'
class _IntMatrixTestCase(_DefaultMatrixTestCase):
_type = 'int'
_format = '%d'
class _ShortMatrixTestCase(_DefaultMatrixTestCase):
_type = 'short'
_format = '%d'
class _CharMatrixTestCase(_DefaultMatrixTestCase):
_type = 'char'
_format = '%c'
class _DoubleVectorTestCase(_DefaultVectorTestCase):
_type = ''
_format = '%f'
class _FloatVectorTestCase(_DefaultVectorTestCase):
_type = 'float'
_format = '%f'
class _ComplexVectorTestCase(_DefaultVectorTestCase):
_type = 'complex'
_format = '%f'
class _ComplexFloatVectorTestCase(_DefaultVectorTestCase):
_type = 'complex_float'
_format = '%f'
class _LongVectorTestCase(_DefaultVectorTestCase):
_type = 'long'
_format = '%ld'
class _IntVectorTestCase(_DefaultVectorTestCase):
_type = 'int'
_format = '%d'
class _ShortVectorTestCase(_DefaultVectorTestCase):
_type = 'short'
_format = '%d'
class _CharVectorTestCase(_DefaultVectorTestCase):
_type = 'char'
_format = '%c'
_reference_value = chr(137)
class _SetIdentityMatrixTestCase(_DefaultMatrixTestCase):
function = 'set_identity'
size = 10
def _mysetUp(self):
tmp = self._get_function()
self.array = tmp((self.size, self.size))
def test_1_matrixsize(self):
array_check(self.array, None, (self.size, self.size))
def test_2_diagonale(self):
for i in range(self.size):
assert self.array[i,i] == 1, "Diagonale not one !"
def test_3_diagonale(self):
for i in range(self.size):
for j in range(self.size):
if i == j :
continue
assert self.array[i,j] == 0, "Of Diagonale not zero!"
def _mytearDown(self):
del self.array
self.array = None
class SetIdentityMatrixTestCase(_DoubleMatrixTestCase,
_DirectAccess,
_SetIdentityMatrixTestCase,
):
pass
class SetIdentityMatrixUITestCase(_DoubleMatrixTestCase,
_UIAccess,
_SetIdentityMatrixTestCase,
):
pass
class SetIdentityFloatMatrixTestCase(_FloatMatrixTestCase,
_DirectAccess,
_SetIdentityMatrixTestCase,
):
pass
class SetIdentityComplexMatrixTestCase(_ComplexMatrixTestCase,
_DirectAccess,
_SetIdentityMatrixTestCase,
):
pass
class SetIdentityComplexFloatMatrixTestCase(_ComplexFloatMatrixTestCase,
_DirectAccess,
_SetIdentityMatrixTestCase,
):
pass
class SetIdentityLongMatrixTestCase(_LongMatrixTestCase,
_DirectAccess,
_SetIdentityMatrixTestCase,
):
pass
class SetIdentityIntMatrixTestCase(_IntMatrixTestCase,
_DirectAccess,
_SetIdentityMatrixTestCase,
):
pass
class SetIdentityShortMatrixTestCase(_ShortMatrixTestCase,
_DirectAccess,
_SetIdentityMatrixTestCase,
):
pass
class SetIdentityFloatMatrixUITestCase(_FloatMatrixTestCase,
_UIAccess,
_SetIdentityMatrixTestCase,
):
pass
class SetIdentityComplexMatrixUITestCase(_ComplexMatrixTestCase,
_UIAccess,
_SetIdentityMatrixTestCase,
):
pass
class SetIdentityComplexFloatMatrixUITestCase(_ComplexFloatMatrixTestCase,
_UIAccess,
_SetIdentityMatrixTestCase,
):
pass
class SetIdentityLongMatrixUITestCase(_LongMatrixTestCase,
_UIAccess,
_SetIdentityMatrixTestCase,
):
pass
class SetIdentityIntMatrixUITestCase(_IntMatrixTestCase,
_UIAccess,
_SetIdentityMatrixTestCase,
):
pass
class SetIdentityShortMatrixUITestCase(_ShortMatrixTestCase,
_UIAccess,
_SetIdentityMatrixTestCase,
):
pass
class SetIdentityCharMatrixTestCase(_CharMatrixTestCase,
_UIAccess,
_SetIdentityMatrixTestCase,
):
def test_2_diagonale(self):
for i in range(self.size):
assert myord(self.array[i,i][0]) == 1, "Diagonale not one !"
def test_3_diagonale(self):
for i in range(self.size):
for j in range(self.size):
if i == j :
continue
test = 0
try:
assert myord(self.array[i,j][0]) == 0, "Of Diagonale not zero!"
test = 1
finally:
if test == 0:
print self.array
print self.array[i,j]
class SetIdentityCharMatrixTestCase(_CharMatrixTestCase,
_DirectAccess,
_SetIdentityMatrixTestCase,
):
def test_2_diagonale(self):
for i in range(self.size):
assert myorda(self.array[i,i]) == 1, "Diagonale not one !"
def test_3_diagonale(self):
for i in range(self.size):
for j in range(self.size):
if i == j :
continue
assert myorda(self.array[i,j]) == 0, "Of Diagonale not zero!"
class _SetZeroMatrixTestCase(_DefaultMatrixTestCase):
function = 'set_zero'
size = 10
def _mysetUp(self):
tmp = self._get_function()
self.array = tmp((self.size, self.size))
def test_1_matrixsize(self):
array_check(self.array, None, (self.size, self.size))
def test_2_all(self):
for i in range(self.size):
for j in range(self.size):
assert self.array[i,j] == 0, "Off Diagonale not zero!"
def test_2_isnull(self):
tmp = self._get_function('isnull')
test = 0
try:
a = tmp(self.array)
test = 1
finally:
if test == 0:
print self, tmp
assert tmp(self.array)
def _mytearDown(self):
del self.array
self.array = None
class SetZeroMatrixTestCase(_DoubleMatrixTestCase,
_DirectAccess,
_SetZeroMatrixTestCase,
):
pass
class SetZeroFloatMatrixTestCase(_FloatMatrixTestCase,
_DirectAccess,
_SetZeroMatrixTestCase,
):
pass
class SetZeroComplexMatrixTestCase(_ComplexMatrixTestCase,
_DirectAccess,
_SetZeroMatrixTestCase,
):
pass
class SetZeroComplexFloatMatrixTestCase(_ComplexFloatMatrixTestCase,
_DirectAccess,
_SetZeroMatrixTestCase,
):
pass
class SetZeroLongMatrixTestCase(_LongMatrixTestCase,
_DirectAccess,
_SetZeroMatrixTestCase,
):
pass
class SetZeroIntMatrixTestCase(_IntMatrixTestCase,
_DirectAccess,
_SetZeroMatrixTestCase,
):
pass
class SetZeroShortMatrixTestCase(_ShortMatrixTestCase,
_DirectAccess,
_SetZeroMatrixTestCase,
):
pass
class SetZeroMatrixUITestCase(_DoubleMatrixTestCase,
_UIAccess,
_SetZeroMatrixTestCase,
):
pass
class SetZeroFloatMatrixUITestCase(_FloatMatrixTestCase,
_UIAccess,
_SetZeroMatrixTestCase,
):
pass
class SetZeroComplexMatrixUITestCase(_ComplexMatrixTestCase,
_UIAccess,
_SetZeroMatrixTestCase,
):
pass
class SetZeroComplexFloatMatrixUITestCase(_ComplexFloatMatrixTestCase,
_UIAccess,
_SetZeroMatrixTestCase,
):
pass
class SetZeroLongMatrixUITestCase(_LongMatrixTestCase,
_UIAccess,
_SetZeroMatrixTestCase,
):
pass
class SetZeroIntMatrixUITestCase(_IntMatrixTestCase,
_UIAccess,
_SetZeroMatrixTestCase,
):
pass
class SetZeroShortMatrixUITestCase(_ShortMatrixTestCase,
_UIAccess,
_SetZeroMatrixTestCase,
):
pass
class SetZeroCharMatrixTestCase(_CharMatrixTestCase,
_DirectAccess,
_SetZeroMatrixTestCase,
):
def test_2_all(self):
for i in range(self.size):
for j in range(self.size):
test = 0
try:
assert myorda(self.array[i,j]) == 0, "Of Diagonale not zero!"
test = 1
finally:
if test == 0:
print repr(self.array[i,j])
class SetZeroCharMatrixUITestCase(_CharMatrixTestCase,
_UIAccess,
_SetZeroMatrixTestCase,
):
def test_2_all(self):
for i in range(self.size):
for j in range(self.size):
test = 0
try:
assert myorda(self.array[i,j]) == 0, "Of Diagonale not zero!"
test = 1
finally:
if test == 0:
print repr(self.array[i,j])
class _SetAllMatrixTestCase(_DefaultMatrixTestCase):
function = 'set_all'
size = 10
def _mysetUp(self):
tmp = self._get_function()
self.array = tmp((self.size, self.size), self._get_reference_value())
def test_1_matrixsize(self):
array_check(self.array, None, (self.size, self.size))
def test_2_all(self):
for i in range(self.size):
for j in range(self.size):
assert self.array[i,j] == self._get_reference_value(), "Value not 137!"
def _mytearDown(self):
del self.array
self.array = None
class SetAllFloatMatrixTestCase(_FloatMatrixTestCase,
_DirectAccess,
_SetAllMatrixTestCase,
):
pass
class SetAllComplexMatrixTestCase(_ComplexMatrixTestCase,
_DirectAccess,
_SetAllMatrixTestCase,
):
def _mysetUp(self):
tmp = self._get_function()
self.array = tmp((self.size, self.size), self._get_reference_value()+0j)
class SetAllComplexFloatMatrixTestCase(_ComplexFloatMatrixTestCase,
_DirectAccess,
_SetAllMatrixTestCase,
):
def _mysetUp(self):
tmp = self._get_function()
self.array = tmp((self.size, self.size), 137+0j)
class SetAllLongMatrixTestCase(_LongMatrixTestCase,
_DirectAccess,
_SetAllMatrixTestCase,
):
pass
class SetAllIntMatrixTestCase(_IntMatrixTestCase,
_DirectAccess,
_SetAllMatrixTestCase,
):
pass
class SetAllShortMatrixTestCase(_ShortMatrixTestCase,
_DirectAccess,
_SetAllMatrixTestCase,
):
pass
class SetAllFloatMatrixUITestCase(_FloatMatrixTestCase,
_UIAccess,
_SetAllMatrixTestCase,
):
pass
class SetAllComplexMatrixUITestCase(_ComplexMatrixTestCase,
_UIAccess,
_SetAllMatrixTestCase,
):
def _mysetUp(self):
tmp = self._get_function()
self.array = tmp((self.size, self.size), 137+0j)
class SetAllComplexFloatMatrixUITestCase(_ComplexFloatMatrixTestCase,
_UIAccess,
_SetAllMatrixTestCase,
):
def _mysetUp(self):
tmp = self._get_function()
self.array = tmp((self.size, self.size), 137+0j)
class SetAllLongMatrixUITestCase(_LongMatrixTestCase,
_UIAccess,
_SetAllMatrixTestCase,
):
pass
class SetAllIntMatrixUITestCase(_IntMatrixTestCase,
_UIAccess,
_SetAllMatrixTestCase,
):
pass
class SetAllShortMatrixUITestCase(_ShortMatrixTestCase,
_UIAccess,
_SetAllMatrixTestCase,
):
pass
class _MatrixSetup:
def _mysetUp(self):
tmp = self._get_function()
self.array = tmp((self.size, self.size), chr(137))
def test_2_all(self):
for i in range(self.size):
for j in range(self.size):
assert myorda(self.array[i,j]) == 137, "Of Diagonale not zero!"
class SetAllCharMatrixTestCase(_CharMatrixTestCase,
_DirectAccess,
_MatrixSetup,
_SetAllMatrixTestCase,
):
pass
class SetAllCharMatrixUITestCase(_CharMatrixTestCase,
_UIAccess,
_MatrixSetup,
_SetAllMatrixTestCase,
):
pass
class _DiagonalMatrixTestCase(_DefaultMatrixTestCase):
size = 4
def _mysetUp(self):
tmp = self._get_function('set_zero')
array = tmp((self.size, self.size))
type = get_typecode(array)
array = nummodule.zeros((self.size,self.size)).astype(type)
for i in range(self.size):
for j in range(self.size):
if i < j:
array[i,j] = -i
else:
array[i,j] = i
self.array = array
def test_1_matrixsize(self):
array_check(self.array, None, (self.size, self.size))
def _gettranspose(self):
function = self._get_function('transpose')
tmp = function(self.array)
assert(tmp[0] == 0)
return tmp[1]
def test_2_matrixsizetranspose(self):
tmp = self._gettranspose()
assert tmp.shape == (self.size, self.size), "Not of size 10, 10"
def test_3_diagonal(self):
function = self._get_function('diagonal')
tmp = function(self.array)
for i in range(self.size):
msg = "Error in getting diagonal! tmp[+"+`i`+"] = " + `tmp`
#assert tmp[i] == i, msg
def test_4_diagonaltranspose(self):
tmp = self._gettranspose()
for i in range(self.size):
msg = "Error in getting diagonal! tmp[+"+`i`+"] = " + `tmp`
#assert tmp[i,i] == i, msg
def test_5_super_diagonal(self):
function = self._get_function('superdiagonal')
for j in range(1,self.size):
tmp = function(self.array, j)
for i in range(self.size - j):
#assert tmp[i,j] == i*-1, "Error in getting super diagonal!"
pass
def test_6_super_diagonaltranspose(self):
function = self._get_function('superdiagonal')
array = self._gettranspose()
for j in range(1,self.size):
tmp = function(array, j)
for i in range(self.size - j):
msg = "Error in getting super diagonal! tmp[+"+`i`+"] = " + `tmp`
#assert tmp[i,j] == i*1+j, msg
def test_7_sub_diagonal(self):
function = self._get_function('subdiagonal')
for j in range(1,self.size):
tmp = function(self.array, j)
for i in range(self.size - j):
assert tmp[i] == i+j, "Error in getting sub diagonal!"
def _mytearDown(self):
del self.array
self.array = None
class DiagonaMatrixTestCase(_DoubleMatrixTestCase,
_DirectAccess,
_DiagonalMatrixTestCase,
):
pass
class DiagonalFloatMatrixTestCase(_FloatMatrixTestCase,
_DirectAccess,
_DiagonalMatrixTestCase,
):
pass
class DiagonalComplexMatrixTestCase(_ComplexMatrixTestCase,
_DirectAccess,
_DiagonalMatrixTestCase,
):
pass
class DiagonalComplexFloatMatrixTestCase(_ComplexFloatMatrixTestCase,
_DirectAccess,
_DiagonalMatrixTestCase,
):
pass
class DiagonalLongMatrixTestCase(_LongMatrixTestCase,
_DirectAccess,
_DiagonalMatrixTestCase,
):
pass
class DiagonalIntMatrixTestCase(_IntMatrixTestCase,
_DirectAccess,
_DiagonalMatrixTestCase,
):
pass
class DiagonalShortMatrixTestCase(_ShortMatrixTestCase,
_DirectAccess,
_DiagonalMatrixTestCase,
):
pass
class DiagonaMatrixUITestCase(_DoubleMatrixTestCase,
_UIAccess,
_DiagonalMatrixTestCase,
):
pass
class DiagonalFloatMatrixUITestCase(_FloatMatrixTestCase,
_UIAccess,
_DiagonalMatrixTestCase,
):
pass
class DiagonalComplexMatrixUITestCase(_ComplexMatrixTestCase,
_UIAccess,
_DiagonalMatrixTestCase,
):
pass
class DiagonalComplexFloatMatrixUITestCase(_ComplexFloatMatrixTestCase,
_UIAccess,
_DiagonalMatrixTestCase,
):
pass
class DiagonalLongMatrixUITestCase(_LongMatrixTestCase,
_UIAccess,
_DiagonalMatrixTestCase,
):
pass
class DiagonalIntMatrixUITestCase(_IntMatrixTestCase,
_UIAccess,
_DiagonalMatrixTestCase,
):
pass
class DiagonalShortMatrixUITestCase(_ShortMatrixTestCase,
_UIAccess,
_DiagonalMatrixTestCase,
):
pass
class _MinMaxMatrixTestCase(_DefaultMatrixTestCase):
size = 10
def _mysetUp(self):
tmp = self._get_function('set_zero')
array = tmp((self.size, self.size))
type = get_typecode(array)
array = nummodule.zeros((self.size,self.size)).astype(type)
array[5,4] = -1
array[8,7] = 1
self.array = array
def test_max(self):
function = self._get_function('max')
assert(function(self.array)== 1)
def test_min(self):
function = self._get_function('min')
assert(function(self.array)== -1)
def test_minmax(self):
function = self._get_function('minmax')
tmp = function(self.array)
assert(tmp[0] == -1)
assert(tmp[1] == 1)
def test_minmax(self):
function = self._get_function('minmax')
tmp = function(self.array)
assert(tmp[0] == -1)
assert(tmp[1] == 1)
def test_maxindex(self):
function = self._get_function('max_index')
tmp = function(self.array)
assert(tmp[0] == 8)
assert(tmp[1] == 7)
def test_minindex(self):
function = self._get_function('min_index')
tmp = function(self.array)
assert(tmp[0] == 5)
assert(tmp[1] == 4)
def test_minmaxindex(self):
function = self._get_function('minmax_index')
tmp = function(self.array)
assert(tmp[0] == 5)
assert(tmp[1] == 4)
assert(tmp[2] == 8)
assert(tmp[3] == 7)
def _mytearDown(self):
pass
class MinMaxMatrixTestCase(_DoubleMatrixTestCase,
_DirectAccess,
_MinMaxMatrixTestCase,
):
pass
class MinMaxFloatMatrixTestCase(_FloatMatrixTestCase,
_DirectAccess,
_MinMaxMatrixTestCase,
):
pass
class MinMaxLongMatrixTestCase(_LongMatrixTestCase,
_DirectAccess,
_MinMaxMatrixTestCase,
):
pass
class MinMaxIntMatrixTestCase(_IntMatrixTestCase,
_DirectAccess,
_MinMaxMatrixTestCase,
):
pass
class MinMaxShortMatrixTestCase(_ShortMatrixTestCase,
_DirectAccess,
_MinMaxMatrixTestCase,
):
pass
class MinMaxMatrixUITestCase(_DoubleMatrixTestCase,
_UIAccess,
_MinMaxMatrixTestCase,
):
pass
class MinMaxFloatMatrixUITestCase(_FloatMatrixTestCase,
_UIAccess,
_MinMaxMatrixTestCase,
):
pass
class MinMaxLongMatrixUITestCase(_LongMatrixTestCase,
_UIAccess,
_MinMaxMatrixTestCase,
):
pass
class MinMaxIntMatrixUITestCase(_IntMatrixTestCase,
_UIAccess,
_MinMaxMatrixTestCase,
):
pass
class MinMaxShortMatrixUITestCase(_ShortMatrixTestCase,
_UIAccess,
_MinMaxMatrixTestCase,
):
pass
class _SwapMatrixTestCase(_DefaultMatrixTestCase):
size = 10
def _mysetUp(self):
tmp = self._get_function('set_zero')
array = tmp((self.size, self.size))
type = get_typecode(array)
array = nummodule.fromfunction(lambda x,y,size=self.size : x*size + y,
(self.size, self.size))
self.array = array.astype(type)
self.array1 = (array*10).astype(type)
def test_1_swap(self):
function = self._get_function('swap')
type = get_typecode(self.array)
tmp = function(self.array, self.array1)
function = self._get_function('isnull')
assert(function((tmp[1]/10).astype(type) - tmp[2]))
def test_2_swap_columns(self):
function = self._get_function('swap_columns')
tmp = function(self.array, 3, 5)
assert(tmp[0] == 0)
for i in range(self.size):
assert(tmp[1][i,3]==10*i+5)
assert(tmp[1][i,5]==10*i+3)
def test_3_swap_rows(self):
function = self._get_function('swap_rows')
tmp = function(self.array, 3, 5)
assert(tmp[0] == 0)
for i in range(self.size):
assert(tmp[1][3,i]==i+50)
assert(tmp[1][5,i]==i+30)
def test_4_swap_rowcol(self):
function = self._get_function('swap_rowcol')
tmp = function(self.array, 3, 5)
assert(tmp[0] == 0)
for i in range(self.size):
assert(tmp[1][3,i]==10*i+5)
for i in range(self.size):
if i == 3:
assert(tmp[1][3,5] == 55)
elif i == 5:
assert(tmp[1][5,5] == 33)
else:
assert(tmp[1][i,5]==30+i)
# def test_5_fwrite(self):
# print "Seek finished "
# file = getopentmpfile('w')
# function = self._get_function('fwrite')
# tmp = function(file, self.array)
#
# def test_6_fread(self):
#
# file = getopentmpfile('w+')
# function = self._get_function('fwrite')
# tmp = function(file, (self.array * 2).astype(self.get_typecode(array)))
# assert(tmp == 0)
# file.seek(0)
#
# function = self._get_function('fread')
# tmp = function(file, self.array.shape)
# assert(tmp[0] == 0)
# for i in range(self.size):
# for j in range(self.size):
# assert(tmp[1][i,j] == self.array[i,j] * 2)
#
#
# def test_7_fprintf(self):
# file = getopentmpfile('w')
# function = self._get_function('fprintf')
# tmp = function(file, self.array, self._get_format())
# assert(tmp == 0)
#
# def test_8_fscanf(self):
# file = getopentmpfile('w+')
# function = self._get_function('fprintf')
# ttype = self.get_typecode(array)
# tmp = function(file, (self.array*2).astype(ttype), self._get_format())
#
# function = self._get_function('fscanf')
# file.seek(0)
#
# tmp = function(file, self.array.shape)
# assert(tmp[0] == 0)
# for i in range(self.size):
# for j in range(self.size):
# assert(tmp[1][i,j] == self.array[i,j] * 2)
def _mytearDown(self):
pass
class SwapMatrixTestCase(_DoubleMatrixTestCase,
_DirectAccess,
_SwapMatrixTestCase,
):
pass
class SwapFloatMatrixTestCase(_FloatMatrixTestCase,
_DirectAccess,
_SwapMatrixTestCase,
):
pass
class SwapComplexMatrixTestCase(_ComplexMatrixTestCase,
_DirectAccess,
_SwapMatrixTestCase,
):
pass
class SwapComplexFloatMatrixTestCase(_ComplexFloatMatrixTestCase,
_DirectAccess,
_SwapMatrixTestCase,
):
pass
class SwapLongMatrixTestCase(_LongMatrixTestCase,
_DirectAccess,
_SwapMatrixTestCase,
):
pass
class SwapIntMatrixTestCase(_IntMatrixTestCase,
_DirectAccess,
_SwapMatrixTestCase,
):
pass
class SwapShortMatrixTestCase(_ShortMatrixTestCase,
_DirectAccess,
_SwapMatrixTestCase,
):
pass
class SwapMatrixUITestCase(_DoubleMatrixTestCase,
_UIAccess,
_SwapMatrixTestCase,
):
pass
class SwapFloatMatrixUITestCase(_FloatMatrixTestCase,
_UIAccess,
_SwapMatrixTestCase,
):
pass
class SwapComplexMatrixUITestCase(_ComplexMatrixTestCase,
_UIAccess,
_SwapMatrixTestCase,
):
pass
class SwapComplexFloatMatrixUITestCase(_ComplexFloatMatrixTestCase,
_UIAccess,
_SwapMatrixTestCase,
):
pass
class SwapLongMatrixUITestCase(_LongMatrixTestCase,
_UIAccess,
_SwapMatrixTestCase,
):
pass
class SwapIntMatrixUITestCase(_IntMatrixTestCase,
_UIAccess,
_SwapMatrixTestCase,
):
pass
class SwapShortMatrixUITestCase(_ShortMatrixTestCase,
_UIAccess,
_SwapMatrixTestCase,
):
pass
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
# Vectors
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
class _SetBasisVectorTestCase(_DefaultVectorTestCase):
function = 'set_basis'
size = 10
basis = 5
def _mysetUp(self):
tmp = self._get_function()
basis = self.basis
tmp1 = tmp(self.size, basis)
assert(tmp1[0] == 0)
self.array = tmp1[1]
def test_1_matrixsize(self):
array_check(self.array, None, (self.size,))
def test_2_diagonale(self):
assert self.array[self.basis] == 1, "Basis not one !"
def test_3_diagonale(self):
for i in range(self.size):
if i == self.basis :
continue
assert self.array[i] == 0, "Basis not zero!"
def _mytearDown(self):
del self.array
self.array = None
class SetBasisVectorTestCase(_DoubleVectorTestCase,
_DirectAccess,
_SetBasisVectorTestCase,
):
pass
class SetBasisVectorUITestCase(_DoubleVectorTestCase,
_UIAccess,
_SetBasisVectorTestCase,
):
pass
class SetBasisFloatVectorTestCase(_FloatVectorTestCase,
_DirectAccess,
_SetBasisVectorTestCase,
):
pass
class SetBasisComplexVectorTestCase(_ComplexVectorTestCase,
_DirectAccess,
_SetBasisVectorTestCase,
):
pass
class SetBasisComplexFloatVectorTestCase(_ComplexFloatVectorTestCase,
_DirectAccess,
_SetBasisVectorTestCase,
):
pass
class SetBasisLongVectorTestCase(_LongVectorTestCase,
_DirectAccess,
_SetBasisVectorTestCase,
):
pass
class SetBasisIntVectorTestCase(_IntVectorTestCase,
_DirectAccess,
_SetBasisVectorTestCase,
):
pass
class SetBasisShortVectorTestCase(_ShortVectorTestCase,
_DirectAccess,
_SetBasisVectorTestCase,
):
pass
class SetBasisFloatVectorUITestCase(_FloatVectorTestCase,
_UIAccess,
_SetBasisVectorTestCase,
):
pass
class SetBasisComplexVectorUITestCase(_ComplexVectorTestCase,
_UIAccess,
_SetBasisVectorTestCase,
):
pass
class SetBasisComplexFloatVectorUITestCase(_ComplexFloatVectorTestCase,
_UIAccess,
_SetBasisVectorTestCase,
):
pass
class SetBasisLongVectorUITestCase(_LongVectorTestCase,
_UIAccess,
_SetBasisVectorTestCase,
):
pass
class SetBasisIntVectorUITestCase(_IntVectorTestCase,
_UIAccess,
_SetBasisVectorTestCase,
):
pass
class SetBasisShortVectorUITestCase(_ShortVectorTestCase,
_UIAccess,
_SetBasisVectorTestCase,
):
pass
class _CharVectorSetup:
    def _mysetup(self):
        # Note: setUp() calls _mysetUp (capital U), so this helper is effectively
        # unused; the char cases fall back to the inherited _mysetUp with chr(137).
        tmp = self._get_function()
        self.array = tmp(self.size, chr(137))
#def test_2_diagonale(self):
# assert ord(self.array[self.basis][0]) == 1, "Diagonale not one !"
#def test_3_diagonale(self):
# for i in range(self.size):
# if i == self.basis :
# continue
# assert ord(self.array[i][0]) == 0, \
# "Off Diagonale not zero!"
class SetBasisCharVectorUITestCase(_CharVectorTestCase,
_UIAccess,
_CharVectorSetup,
_SetBasisVectorTestCase,
):
def test_2_diagonale(self):
assert myord(self.array[self.basis]) == 1, "Basis not one !"
def test_3_diagonale(self):
for i in range(self.size):
if i == self.basis :
continue
assert myord(self.array[i]) == 0, "Basis not zero!"
class SetBasisCharVectorTestCase(_CharVectorTestCase,
_DirectAccess,
_CharVectorSetup,
_SetBasisVectorTestCase,
):
def test_2_diagonale(self):
assert myord(self.array[self.basis]) == 1, "Basis not one !"
def test_3_diagonale(self):
for i in range(self.size):
if i == self.basis :
continue
assert myord(self.array[i]) == 0, "Basis not zero!"
class _SetZeroVectorTestCase(_DefaultVectorTestCase):
function = 'set_zero'
size = 10
def _mysetUp(self):
tmp = self._get_function()
self.array = tmp(self.size)
def test_1_matrixsize(self):
assert self.array.shape == (self.size,), "Not of size 10, 10"
def test_2_all(self):
for i in range(self.size):
assert self.array[i] == 0, "Off Diagonale not zero!"
def test_2_isnull(self):
tmp = self._get_function('isnull')
assert tmp(self.array)
def _mytearDown(self):
del self.array
self.array = None
class SetZeroVectorTestCase(_DoubleVectorTestCase,
_DirectAccess,
_SetZeroVectorTestCase,
):
pass
class SetZeroFloatVectorTestCase(_FloatVectorTestCase,
_DirectAccess,
_SetZeroVectorTestCase,
):
pass
class SetZeroComplexVectorTestCase(_ComplexVectorTestCase,
_DirectAccess,
_SetZeroVectorTestCase,
):
pass
class SetZeroComplexFloatVectorTestCase(_ComplexFloatVectorTestCase,
_DirectAccess,
_SetZeroVectorTestCase,
):
pass
class SetZeroLongVectorTestCase(_LongVectorTestCase,
_DirectAccess,
_SetZeroVectorTestCase,
):
pass
class SetZeroIntVectorTestCase(_IntVectorTestCase,
_DirectAccess,
_SetZeroVectorTestCase,
):
pass
class SetZeroShortVectorTestCase(_ShortVectorTestCase,
_DirectAccess,
_SetZeroVectorTestCase,
):
pass
class SetZeroVectorUITestCase(_DoubleVectorTestCase,
_UIAccess,
_SetZeroVectorTestCase,
):
pass
class SetZeroFloatVectorUITestCase(_FloatVectorTestCase,
_UIAccess,
_SetZeroVectorTestCase,
):
pass
class SetZeroComplexVectorUITestCase(_ComplexVectorTestCase,
_UIAccess,
_SetZeroVectorTestCase,
):
pass
class SetZeroComplexFloatVectorUITestCase(_ComplexFloatVectorTestCase,
_UIAccess,
_SetZeroVectorTestCase,
):
pass
class SetZeroLongVectorUITestCase(_LongVectorTestCase,
_UIAccess,
_SetZeroVectorTestCase,
):
pass
class SetZeroIntVectorUITestCase(_IntVectorTestCase,
_UIAccess,
_SetZeroVectorTestCase,
):
pass
class SetZeroShortVectorUITestCase(_ShortVectorTestCase,
_UIAccess,
_SetZeroVectorTestCase,
):
pass
class SetZeroCharVectorTestCase(_CharVectorTestCase,
_DirectAccess,
_SetZeroVectorTestCase,
):
def test_2_all(self):
for i in range(self.size):
test = 0
cztmp = myorda(self.array[i])
try:
assert cztmp == 0, "Of Diagonale not zero!"
test = 1
finally:
if test == 0:
print "Of Diagonale not zero (but %s) for class %s !" (cztmp, self)
class SetZeroCharVectorUITestCase(_CharVectorTestCase,
_UIAccess,
_SetZeroVectorTestCase,
):
def test_2_all(self):
for i in range(self.size):
assert myorda(self.array[i]) == 0, "Of Diagonale not zero!"
class _SetAllVectorTestCase(_DefaultVectorTestCase):
function = 'set_all'
size = 10
def _mysetUp(self):
tmp = self._get_function()
self.array = tmp(self.size, self._get_reference_value())
def test_1_matrixsize(self):
array_check(self.array, None, (self.size,))
def test_2_all(self):
for i in range(self.size):
tmp = self.array[i]
try:
test = 0
assert tmp == self._get_reference_value(), "Value not 137!"
test = 1
finally:
if test == 0:
print type(self.array), get_typecode(self.array)
print "self.array[%d] was %s" %(i, tmp)
def _mytearDown(self):
del self.array
self.array = None
class SetAllFloatVectorTestCase(_FloatVectorTestCase,
_DirectAccess,
_SetAllVectorTestCase,
):
pass
class _ComplexVectorSetup:
def _mysetUp(self):
tmp = self._get_function()
self.array = tmp(self.size, 137+0j)
class SetAllComplexVectorTestCase(_ComplexVectorTestCase,
_DirectAccess,
_ComplexVectorSetup,
_SetAllVectorTestCase,
):
pass
class SetAllComplexFloatVectorTestCase(_ComplexFloatVectorTestCase,
_DirectAccess,
_ComplexVectorSetup,
_SetAllVectorTestCase,
):
pass
class SetAllLongVectorTestCase(_LongVectorTestCase,
_DirectAccess,
_SetAllVectorTestCase,
):
pass
class SetAllIntVectorTestCase(_IntVectorTestCase,
_DirectAccess,
_SetAllVectorTestCase,
):
pass
class SetAllShortVectorTestCase(_ShortVectorTestCase,
_DirectAccess,
_SetAllVectorTestCase,
):
pass
class SetAllFloatVectorUITestCase(_FloatVectorTestCase,
_UIAccess,
_SetAllVectorTestCase,
):
pass
class SetAllComplexVectorUITestCase(_ComplexVectorTestCase,
_UIAccess,
_ComplexVectorSetup,
_SetAllVectorTestCase,
):
pass
class SetAllComplexFloatVectorUITestCase(_ComplexFloatVectorTestCase,
_UIAccess,
_ComplexVectorSetup,
_SetAllVectorTestCase,
):
pass
class SetAllLongVectorUITestCase(_LongVectorTestCase,
_UIAccess,
_SetAllVectorTestCase,
):
pass
class SetAllIntVectorUITestCase(_IntVectorTestCase,
_UIAccess,
_SetAllVectorTestCase,
):
pass
class SetAllShortVectorUITestCase(_ShortVectorTestCase,
_UIAccess,
_SetAllVectorTestCase,
):
pass
class SetAllCharVectorTestCase(_CharVectorTestCase,
_DirectAccess,
_CharVectorSetup,
_SetAllVectorTestCase,
):
pass
class SetAllCharVectorUITestCase(_CharVectorTestCase,
_UIAccess,
_CharVectorSetup,
_SetAllVectorTestCase,
):
pass
class _MinMaxVectorTestCase(_DefaultVectorTestCase):
size = 10
def _mysetUp(self):
tmp = self._get_function('set_zero')
array = tmp((self.size))
type = get_typecode(array)
array = nummodule.zeros((self.size,)).astype(type)
array[5] = -1
array[8] = 1
self.array = array
def test_max(self):
function = self._get_function('max')
assert(function(self.array)== 1)
def test_min(self):
function = self._get_function('min')
assert(function(self.array)== -1)
def test_minmax(self):
function = self._get_function('minmax')
tmp = function(self.array)
assert(tmp[0] == -1)
assert(tmp[1] == 1)
def test_maxindex(self):
function = self._get_function('max_index')
tmp = function(self.array)
assert(tmp == 8)
def test_minindex(self):
function = self._get_function('min_index')
tmp = function(self.array)
assert(tmp == 5)
def test_minmaxindex(self):
function = self._get_function('minmax_index')
tmp = function(self.array)
assert(tmp[0] == 5)
assert(tmp[1] == 8)
def _mytearDown(self):
pass
class MinMaxVectorTestCase(_DoubleVectorTestCase,
_DirectAccess,
_MinMaxVectorTestCase,
):
pass
class MinMaxFloatVectorTestCase(_FloatVectorTestCase,
_DirectAccess,
_MinMaxVectorTestCase,
):
pass
class MinMaxLongVectorTestCase(_LongVectorTestCase,
_DirectAccess,
_MinMaxVectorTestCase,
):
pass
class MinMaxIntVectorTestCase(_IntVectorTestCase,
_DirectAccess,
_MinMaxVectorTestCase,
):
pass
class MinMaxShortVectorTestCase(_ShortVectorTestCase,
_DirectAccess,
_MinMaxVectorTestCase,
):
pass
class MinMaxVectorUITestCase(_DoubleVectorTestCase,
_UIAccess,
_MinMaxVectorTestCase,
):
pass
class MinMaxFloatVectorUITestCase(_FloatVectorTestCase,
_UIAccess,
_MinMaxVectorTestCase,
):
pass
class MinMaxLongVectorUITestCase(_LongVectorTestCase,
_UIAccess,
_MinMaxVectorTestCase,
):
pass
class MinMaxIntVectorUITestCase(_IntVectorTestCase,
_UIAccess,
_MinMaxVectorTestCase,
):
pass
class MinMaxShortVectorUITestCase(_ShortVectorTestCase,
_UIAccess,
_MinMaxVectorTestCase,
):
pass
class _SwapVectorTestCase(_DefaultVectorTestCase):
size = 10
def _mysetUp(self):
tmp = self._get_function('set_zero')
array = tmp(self.size)
type = get_typecode(array)
array = nummodule.arange(self.size)
self.array = array.astype(type)
self.array1 = (array*10).astype(type)
def testswap(self):
function = self._get_function('swap')
type = get_typecode(self.array)
tmp = function(self.array, self.array1)
function = self._get_function('isnull')
assert(function((tmp[1]/10).astype(type) - tmp[2]))
def testswap_elements(self):
function = self._get_function('swap_elements')
tmp = function(self.array, 3, 5)
assert(tmp[0] == 0)
for i in range(self.size):
if i == 3:
assert(tmp[1][3] == 5)
elif i == 5:
assert(tmp[1][5] == 3)
else:
assert(tmp[1][i]==i)
def test_reverse(self):
function = self._get_function('reverse')
tmp = function(self.array)
assert(tmp[0] == 0)
for i in range(self.size):
assert(tmp[1][-(i+1)]==i)
# def test_fwrite(self):
# file = getopentmpfile('w')
# function = self._get_function('fwrite')
# #print "Testing fwrite!"
# tmp = function(file, self.array)
#
# def test_fread(self):
# file = getopentmpfile('w+')
# function = self._get_function('fwrite')
# tmp = function(file, (self.array * 2).astype(self.get_typecode(array)))
# assert(tmp == 0)
# file.seek(0)
# function = self._get_function('fread')
# tmp = function(file, self.array.shape[0])
# assert(tmp[0] == 0)
# for i in range(self.size):
# assert(tmp[1][i] == self.array[i] * 2)
#
# def test_fprintf(self):
# file = getopentmpfile('w')
# function = self._get_function('fprintf')
# tmp = function(file, self.array, self._get_format())
# assert(tmp == 0)
#
# def test_fscanf(self):
# file = getopentmpfile('w+')
# function = self._get_function('fprintf')
# ttype = self.get_typecode(array)
# tmp = function(file, (self.array*2).astype(ttype), self._get_format())
#
# function = self._get_function('fscanf')
# file.seek(0)
#
# tmp = function(file, self.array.shape[0])
# assert(tmp[0] == 0)
# for i in range(self.size):
# assert(tmp[1][i] == self.array[i] * 2)
def _mytearDown(self):
pass
class SwapVectorTestCase(_DoubleVectorTestCase,
_DirectAccess,
_SwapVectorTestCase,
):
pass
class SwapFloatVectorTestCase(_FloatVectorTestCase,
_DirectAccess,
_SwapVectorTestCase,
):
pass
class SwapComplexVectorTestCase(_ComplexVectorTestCase,
_DirectAccess,
_SwapVectorTestCase,
):
pass
class SwapComplexFloatVectorTestCase(_ComplexFloatVectorTestCase,
_DirectAccess,
_SwapVectorTestCase,
):
pass
class SwapLongVectorTestCase(_LongVectorTestCase,
_DirectAccess,
_SwapVectorTestCase,
):
pass
class SwapIntVectorTestCase(_IntVectorTestCase,
_DirectAccess,
_SwapVectorTestCase,
):
pass
class SwapShortVectorTestCase(_ShortVectorTestCase,
_DirectAccess,
_SwapVectorTestCase,
):
pass
class SwapVectorUITestCase(_DoubleVectorTestCase,
_UIAccess,
_SwapVectorTestCase,
):
pass
class SwapFloatVectorUITestCase(_FloatVectorTestCase,
_UIAccess,
_SwapVectorTestCase,
):
pass
class SwapComplexVectorUITestCase(_ComplexVectorTestCase,
_UIAccess,
_SwapVectorTestCase,
):
pass
class SwapComplexFloatVectorUITestCase(_ComplexFloatVectorTestCase,
_UIAccess,
_SwapVectorTestCase,
):
pass
class SwapLongVectorUITestCase(_LongVectorTestCase,
_UIAccess,
_SwapVectorTestCase,
):
pass
class SwapIntVectorUITestCase(_IntVectorTestCase,
_UIAccess,
_SwapVectorTestCase,
):
pass
class SwapShortVectorUITestCase(_ShortVectorTestCase,
_UIAccess,
_SwapVectorTestCase,
):
pass
#del DiagonalComplexFloatMatrixTestCase
#del DiagonalComplexFloatMatrixUITestCase
# del SwapComplexFloatMatrixTestCase
# del SwapComplexFloatMatrixUITestCase
# del SwapComplexFloatVectorTestCase
# del SwapComplexFloatVectorUITestCase
# del SwapComplexMatrixTestCase
# del SwapComplexMatrixUITestCase
# del SwapComplexVectorTestCase
# del SwapComplexVectorUITestCase
# del SwapFloatMatrixTestCase
# del SwapFloatMatrixUITestCase
# del SwapFloatVectorTestCase
# del SwapFloatVectorUITestCase
# del SwapIntMatrixTestCase
# del SwapIntMatrixUITestCase
# del SwapIntVectorTestCase
# del SwapIntVectorUITestCase
# del SwapLongMatrixTestCase
# del SwapLongMatrixUITestCase
# del SwapLongVectorTestCase
# del SwapLongVectorUITestCase
# del SwapMatrixTestCase
# del SwapMatrixUITestCase
# del SwapVectorTestCase
# del SwapVectorUITestCase
# del SwapShortMatrixTestCase
# del SwapShortMatrixUITestCase
# del SwapShortVectorTestCase
# del SwapShortVectorUITestCase
# del SetZeroComplexFloatVectorUITestCase
# del SetZeroComplexFloatVectorTestCase
# del SetZeroComplexVectorUITestCase
# del SetZeroComplexVectorTestCase
# del SetZeroComplexFloatMatrixUITestCase
# del SetZeroComplexFloatMatrixTestCase
# del SetZeroComplexMatrixUITestCase
# del SetZeroComplexMatrixTestCase
# del SetZeroIntMatrixTestCase
# del SetZeroIntMatrixUITestCase
# del SetZeroIntVectorTestCase
# del SetZeroIntVectorUITestCase
# del SetZeroLongMatrixTestCase
# del SetZeroLongMatrixUITestCase
# del SetZeroLongVectorTestCase
# del SetZeroLongVectorUITestCase
# del SetZeroMatrixTestCase
# del SetZeroMatrixUITestCase
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
# Remove ..
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
# These just provide a few values.....
del _DefaultTestCase
del _DefaultVectorTestCase
del _DefaultMatrixTestCase
del _DoubleMatrixTestCase
del _FloatMatrixTestCase
del _ComplexMatrixTestCase
del _ComplexFloatMatrixTestCase
del _LongMatrixTestCase
del _ShortMatrixTestCase
del _IntMatrixTestCase
del _CharMatrixTestCase
del _DoubleVectorTestCase
del _FloatVectorTestCase
del _ComplexVectorTestCase
del _ComplexFloatVectorTestCase
del _LongVectorTestCase
del _ShortVectorTestCase
del _IntVectorTestCase
del _CharVectorTestCase
del _DirectAccess
del _UIAccess
del _SetIdentityMatrixTestCase
del _MinMaxMatrixTestCase
del _DiagonalMatrixTestCase
del _SetZeroMatrixTestCase
del _SetAllMatrixTestCase
del _SwapMatrixTestCase
del _SetBasisVectorTestCase
del _MinMaxVectorTestCase
del _SetZeroVectorTestCase
del _SetAllVectorTestCase
del _SwapVectorTestCase
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "58013b72aaf63a40fb828f723f3004b4",
"timestamp": "",
"source": "github",
"line_count": 1812,
"max_line_length": 97,
"avg_line_length": 33.21081677704194,
"alnum_prop": 0.4732792714945661,
"repo_name": "juhnowski/FishingRod",
"id": "ae400ab2bce76d47a6109586111801c70bc9ec15",
"size": "60228",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "production/pygsl-0.9.5/tests/block_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "4567093"
},
{
"name": "CSS",
"bytes": "85"
},
{
"name": "JavaScript",
"bytes": "17530"
},
{
"name": "Makefile",
"bytes": "2466"
},
{
"name": "Python",
"bytes": "826772"
},
{
"name": "Shell",
"bytes": "230"
},
{
"name": "TeX",
"bytes": "213236"
}
],
"symlink_target": ""
} |
"""
A simple component that dumps packet_in info to the log.
Use --verbose for really verbose dumps.
Use --show to show all packets.
"""
from pox.core import core
import pox.openflow.libopenflow_01 as of
import pox.lib.packet as pkt
from pox.lib.util import dpidToStr
log = core.getLogger()
_verbose = None
_max_length = None
_types = None
_show_by_default = None
def _handle_PacketIn (event):
packet = event.parsed
show = _show_by_default
p = packet
while p:
if p.__class__.__name__.lower() in _types:
if _show_by_default:
# This packet is hidden
return
else:
# This packet should be shown
show = True
break
      return # unreachable: both branches above already exit
if not hasattr(p, 'next'): break
p = p.next
if not show: return
msg = dpidToStr(event.dpid) + ": "
msg = ""
if _verbose:
msg += packet.dump()
else:
p = packet
while p:
if isinstance(p, bytes):
msg += "[%s bytes]" % (len(p),)
break
elif isinstance(p, str):
msg += "[%s chars]" % (len(p),)
break
msg += "[%s]" % (p.__class__.__name__,)
p = p.next
if _max_length:
if len(msg) > _max_length:
msg = msg[:_max_length-3]
msg += "..."
core.getLogger("dump:" + dpidToStr(event.dpid)).debug(msg)
def launch (verbose = False, max_length = 110, full_packets = True,
hide = False, show = False):
global _verbose, _max_length, _types, _show_by_default
_verbose = verbose
_max_length = max_length
force_show = (show is True) or (hide is False and show is False)
if isinstance(hide, str):
hide = hide.replace(',', ' ').replace('|', ' ')
hide = set([p.lower() for p in hide.split()])
else:
hide = set()
if isinstance(show, str):
show = show.replace(',', ' ').replace('|', ' ')
show = set([p.lower() for p in show.split()])
else:
show = set()
if hide and show:
raise RuntimeError("Can't both show and hide packet types")
if show:
_types = show
else:
_types = hide
_show_by_default = not not hide
if force_show:
_show_by_default = force_show
if full_packets:
# Send full packets to controller
core.openflow.miss_send_len = 0xffff
core.openflow.addListenerByName("PacketIn", _handle_PacketIn)
log.info("Packet dumper running")
| {
"content_hash": "819c40d7ad24220addc4a1462325bfc1",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 67,
"avg_line_length": 23.783505154639176,
"alnum_prop": 0.5947117468573906,
"repo_name": "noxrepo/pox",
"id": "2e87802846f8333aba79f58a31f6fa634a605ecd",
"size": "2887",
"binary": false,
"copies": "2",
"ref": "refs/heads/gar-experimental",
"path": "pox/info/packet_dump.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "18894"
},
{
"name": "JavaScript",
"bytes": "9048"
},
{
"name": "Python",
"bytes": "1212596"
},
{
"name": "Shell",
"bytes": "815"
}
],
"symlink_target": ""
} |