code | repo_name | path | language | license | size
---|---|---|---|---|---
import logging
import fmcapi
def test__cert_enrollment(fmc):
logging.info("Testing CertEnrollment class. Requires a CertEnrollment")
obj1 = fmcapi.CertEnrollments(fmc=fmc)
logging.info("All CertEnrollments -- >")
result = obj1.get()
logging.info(result)
logging.info(f"Total items: {len(result['items'])}")
del obj1
logging.info("Testing CertEnrollment class done.\n")
| daxm/fmcapi | unit_tests/certificate_enrollment.py | Python | bsd-3-clause | 405 |
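The test above expects an already-authenticated `fmc` connection object. A minimal sketch of how such a test is typically driven, assuming the `FMC` context-manager constructor that the fmcapi package exposes (the host, username, and password below are placeholders):

```python
import logging

import fmcapi

logging.basicConfig(level=logging.INFO)

# Placeholder connection details -- point these at a reachable FMC instance.
with fmcapi.FMC(host="fmc.example.com",
                username="apiuser",
                password="changeme",
                autodeploy=False) as fmc:
    # test__cert_enrollment is the function defined in the file above.
    test__cert_enrollment(fmc)
```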
#!/usr/bin/env python3
import os, sys, signal, argparse, configparser, traceback, time
from contextlib import closing
from ananas import PineappleBot
import ananas.default
# Add the cwd to the module search path so that we can load user bot classes
sys.path.append(os.getcwd())
bots = []
def shutdown_all(signum, frame):
for bot in bots:
if bot.state == PineappleBot.RUNNING: bot.shutdown()
sys.exit("Shutdown complete")
def main():
parser = argparse.ArgumentParser(description="Pineapple command line interface.", prog="ananas")
parser.add_argument("config", help="A cfg file to read bot configuration from.")
parser.add_argument("-v", "--verbose", action="store_true", help="Log more extensive messages for e.g. debugging purposes.")
parser.add_argument("-i", "--interactive", action="store_true", help="Use interactive prompts for e.g. mastodon login")
args = parser.parse_args()
prog = sys.argv[0]
cfg = configparser.ConfigParser()
try: cfg.read(args.config)
except FileNotFoundError:
sys.exit("Couldn't open '{}', exiting.".format(args.config))
for bot in cfg:
if bot == "DEFAULT": continue
if not "class" in cfg[bot]:
print("{}: no class specified, skipping {}.".format(prog, bot))
continue
botclass = cfg[bot]["class"]
module, _, botclass = botclass.rpartition(".")
if module == "":
print("{}: no module given in class name '{}', skipping {}.".format(prog, botclass, bot))
try:
exec("from {0} import {1}; bots.append({1}('{2}', name='{3}', interactive={4}, verbose={5}))"
.format(module, botclass, args.config, bot, args.interactive, args.verbose))
except ModuleNotFoundError as e:
print("{}: encountered the following error loading module {}:".format(prog, module))
print("{}: the error was: {}".format(prog, e))
print("{}: skipping {}!".format(prog, bot))
continue
except Exception as e:
print("{}: fatal exception loading bot {}: {}\n{}".format(prog, bot, repr(e), traceback.format_exc()))
continue
except KeyboardInterrupt:
sys.exit()
signal.signal(signal.SIGINT, shutdown_all)
signal.signal(signal.SIGABRT, shutdown_all)
signal.signal(signal.SIGTERM, shutdown_all)
try:
while(True): time.sleep(60)
except KeyboardInterrupt:
shutdown_all(None, None)
if __name__ == "__main__":
main()
| Chronister/ananas | ananas/run.py | Python | mit | 2,535 |
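The loader above instantiates each bot class through `exec()` on a format string, which is fragile when config values contain quotes. A sketch of the same dynamic import done with `importlib`, mirroring the constructor call inside the exec string (the module/class/config names are whatever the loop variables hold):

```python
import importlib

def load_bot(module_name, class_name, config_path, section, interactive, verbose):
    """Import module_name, look up class_name, and build it the way the exec() call does."""
    module = importlib.import_module(module_name)
    bot_class = getattr(module, class_name)
    return bot_class(config_path, name=section, interactive=interactive, verbose=verbose)

# Inside the config loop this would replace the exec() call with:
#     bots.append(load_bot(module, botclass, args.config, bot, args.interactive, args.verbose))
```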
# -*- coding: utf8 -*-
"""
Created on 23/08/2010
@author vbmendes
"""
from django.test import TestCase
from shorturl import conf
class RedirectTest(TestCase):
urls = 'shorturl.urls'
fixtures = ['shorturl-test-data.json']
def setUp(self):
self.old_models = conf.MODELS
conf.MODELS = {
'': 'shorturl.URL',
}
conf.set_prefixes(conf.MODELS)
def tearDown(self):
if self.old_models is not None:
conf.MODELS = self.old_models
conf.set_prefixes(self.old_models)
def test_redirect_view_1z(self):
response = self.client.get('/1z')
self.assertEqual(response.status_code, 301)
self.assertEqual(response['Location'], 'http://google.com/')
def test_redirect_view_5B(self):
response = self.client.get('/5B')
self.assertEqual(response.status_code, 301)
self.assertEqual(response['Location'], 'http://test.com/')
def test_redirect_view_object_does_not_exists(self):
response = self.client.get('/6B')
self.assertEqual(response.status_code, 404)
class PrefixedRedirectTest(TestCase):
urls = 'shorturl.urls'
fixtures = ['shorturl-test-data.json']
def setUp(self):
self.old_models = conf.MODELS
conf.MODELS = {
'u': 'shorturl.URL',
}
conf.set_prefixes(conf.MODELS)
def tearDown(self):
if self.old_models is not None:
conf.MODELS = self.old_models
conf.set_prefixes(self.old_models)
def test_redirect_view_1z(self):
response = self.client.get('/u1z')
self.assertEqual(response.status_code, 301)
self.assertEqual(response['Location'], 'http://google.com/')
def test_redirect_view_5B(self):
response = self.client.get('/u5B')
self.assertEqual(response.status_code, 301)
self.assertEqual(response['Location'], 'http://test.com/')
def test_redirect_view_incorrect_prefix(self):
response = self.client.get('/a1z')
self.assertEqual(response.status_code, 404)
| vbmendes/django-meio-shorturl | src/shorturl/tests/test_views.py | Python | bsd-3-clause | 2,085 |
import requests
import json
from requests_oauthlib import OAuth1
from requests_oauthlib import OAuth1Session
import time
# This code was built using Python 3.5
start_time = time.time()
baseURI = 'https://{YOUR BASE URI}/interaction_history/api/account/{YOUR ACCOUNT NUMBER}/interactions/search?offset=0&limit=10'
consumer_key = 'your consumer key'
consumer_secret = 'your consumer secret'
access_token = 'your access token'
access_token_secret = 'your token secret'
client = requests.session()
postheader = {'content-type': 'application/json'}
params={'offset':'0'}
body={'start':{'from':'1433140200000','to':'1435645800000'}}
oauth = OAuth1(consumer_key,
client_secret=consumer_secret,
resource_owner_key=access_token,
resource_owner_secret=access_token_secret,
signature_method='HMAC-SHA1',
signature_type='auth_header')
response = client.post(url=baseURI, headers=postheader, data=json.dumps(body), auth=oauth, params=params)
results = json.loads(response.content.decode())
# For older versions of python you might need to use this line
#results = json.loads(response.content.decode('utf-8'))
outfile = 'test.txt'
#headers
header = ["stime", "etime", "dur", "vID", "eID", "inter", "agent", "skill", "chan", "startR", "endR"]
#open outfile
file = open(outfile, 'w')
file.write(','.join(header)+ '\n')
#create list of chat text and append to outfile
result=[]
for line in results["interactionHistoryRecords"]:
for x in ["info"]:
temp_list=[]
temp_list.append(line["info"]["startTime"])
temp_list.append(line["info"]["endTime"])
temp_list.append(str(line["info"]["duration"]))
temp_list.append(line["info"]["visitorId"])
temp_list.append(line["info"]["engagementId"])
temp_list.append(str(line["info"]["isInteractive"]))
temp_list.append(line["info"]["agentId"])
temp_list.append(str(line["info"]["skillId"]))
temp_list.append(str(line["info"]["channel"]))
temp_list.append(line["info"]["startReason"])
temp_list.append(line["info"]["endReason"])
result.append(temp_list)
for i in range(0,len(result)):
file.write(','.join(result[i]) + '\n')
file.close()
#print time it took to complete
print("--- %s seconds ---" % (time.time() - start_time))
| scottwestover/LiveEngageExamples | APIs/Engagement History API/Python-EngagementHistoryAPISample/ehapiExample.py | Python | mit | 2,225 |
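The script above assembles CSV rows with `','.join()`, which produces malformed lines whenever a field (for example `startReason` or `endReason`) itself contains a comma. A sketch of the same output step using the standard `csv` module, which quotes such fields automatically:

```python
import csv

header = ["stime", "etime", "dur", "vID", "eID", "inter",
          "agent", "skill", "chan", "startR", "endR"]

def write_rows(result, outfile="test.txt"):
    """Write the row lists built in the loop above, quoting fields as needed."""
    with open(outfile, "w", newline="") as fh:
        writer = csv.writer(fh)
        writer.writerow(header)
        writer.writerows(result)
```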
from django.conf.urls.defaults import patterns, include, url
from django.contrib.auth.decorators import permission_required, login_required
from rapidsms_httprouter.views import console
from django.conf import settings
urlpatterns = patterns('',
url(r'^users/', include('smartmin.users.urls')),
url(r'^text/', include('nsms.text.urls')),
url('^console/', include('nsms.console.urls')),
url('', include('rapidsms_httprouter.urls')),
# add your apps here
url('^stores/', include('stores.urls')),
url('^products/', include('products.urls')),
url('^locales/', include('locales.urls')),
url('^orders/', include('orders.urls')),
url('^landmarks/', include('landmarks.urls')),
url('^transactions/', include('transactions.urls')),
url('^quickblocks/', include('django_quickblocks.urls')),
# public apps
url('^', include('public.urls')),
url('^dash/', include('dashboard.urls')),
# django-sentry for error logging
(r'^sentry/', include('sentry.web.urls')),
)
# site static for development
if settings.DEBUG:
urlpatterns += patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}))
urlpatterns += patterns('',
url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.STATIC_ROOT,
}))
def handler500(request):
"""
500 error handler which includes ``request`` in the context.
Templates: `500.html`
Context: None
"""
from django.template import Context, loader
from django.http import HttpResponseServerError
t = loader.get_template('500.html') # You need to create a 500.html template.
return HttpResponseServerError(t.render(Context({
'request': request,
})))
| nyaruka/motome | motome/urls.py | Python | bsd-3-clause | 1,851 |
# -*- test-case-name: go.apps.surveys.tests.test_vumi_app -*-
from twisted.internet.defer import inlineCallbacks
from vxpolls.example import PollApplication
from vxpolls.manager import PollManager
from vumi.message import TransportUserMessage
from vumi import log
from go.vumitools.app_worker import GoApplicationMixin, GoWorkerConfigMixin
class SurveyConfig(PollApplication.CONFIG_CLASS, GoWorkerConfigMixin):
pass
class SurveyApplication(PollApplication, GoApplicationMixin):
CONFIG_CLASS = SurveyConfig
worker_name = 'survey_application'
def validate_config(self):
# vxpolls
vxp_config = self.config.get('vxpolls', {})
self.poll_prefix = vxp_config.get('prefix')
@inlineCallbacks
def setup_application(self):
yield self._go_setup_worker()
self.pm = PollManager(self.redis, self.poll_prefix)
@inlineCallbacks
def teardown_application(self):
yield self.pm.stop()
yield self._go_teardown_worker()
@inlineCallbacks
def consume_user_message(self, message):
contact = yield self.get_contact_for_message(message, create=True)
yield self._handle_survey_message(message, contact)
@inlineCallbacks
def _handle_survey_message(self, message, contact):
helper_metadata = message['helper_metadata']
go = helper_metadata.get('go')
poll_id = 'poll-%s' % (go.get('conversation_key'),)
helper_metadata['poll_id'] = poll_id
participant = yield self.pm.get_participant(
poll_id, message.user())
poll = yield self.pm.get_poll_for_participant(poll_id, participant)
if poll is None:
yield self.reply_to(
message, 'Service Unavailable. Please try again later.',
continue_session=False)
return
config = yield self.pm.get_config(poll_id)
for key in config.get('include_labels', []):
value = contact.extra[key]
if value and key not in participant.labels:
participant.set_label(key, value)
yield self.pm.save_participant(poll_id, participant)
yield super(SurveyApplication, self).consume_user_message(message)
def start_survey(self, to_addr, contact, conversation, **msg_options):
log.debug('Starting %r -> %s' % (conversation, to_addr))
# We reverse the to_addr & from_addr since we're faking input
# from the client to start the survey.
from_addr = msg_options.pop('from_addr')
conversation.set_go_helper_metadata(
msg_options.setdefault('helper_metadata', {}))
msg = TransportUserMessage(from_addr=to_addr, to_addr=from_addr,
content='', **msg_options)
return self._handle_survey_message(msg, contact)
@inlineCallbacks
def end_session(self, participant, poll, message):
# At the end of a session we want to store the user's responses
# as dynamic values on the contact's record in the contact database.
# This does that.
contact = yield self.get_contact_for_message(message, create=True)
# Clear previous answers from this poll
possible_labels = [q.get('label') for q in poll.questions]
for label in possible_labels:
if (label is not None) and (label in contact.extra):
del contact.extra[label]
contact.extra.update(participant.labels)
yield contact.save()
yield self.pm.save_participant(poll.poll_id, participant)
yield self.trigger_event(message, 'survey_completed', {
'from_addr': message['from_addr'],
'message_id': message['message_id'],
'transport_type': message['transport_type'],
'participant': participant.dump(),
})
yield super(SurveyApplication, self).end_session(
participant, poll, message)
@inlineCallbacks
def process_command_send_survey(self, cmd_id, user_account_key,
conversation_key, batch_id, msg_options,
delivery_class, **extra_params):
conv = yield self.get_conversation(user_account_key, conversation_key)
if conv is None:
log.warning("Cannot find conversation '%s' for user '%s'." % (
conversation_key, user_account_key))
return
for contacts in (yield conv.get_opted_in_contact_bunches(
delivery_class)):
for contact in (yield contacts):
to_addr = contact.addr_for(delivery_class)
# Set some fake msg_options in case we didn't get real ones.
msg_options.setdefault('from_addr', None)
msg_options.setdefault('transport_name', None)
msg_options.setdefault('transport_type', 'sms')
yield self.start_survey(to_addr, contact, conv, **msg_options)
| praekelt/vumi-go | go/apps/surveys/vumi_app.py | Python | bsd-3-clause | 4,964 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------
# test_groupProperties.py
#
# test for groupProperties rule
# ----------------------------------------------------------------
# copyright (c) 2014 - Domen Ipavec
# Distributed under The MIT License, see LICENSE
# ----------------------------------------------------------------
import unittest
from cssqc.parser import CSSQC
from cssqc.qualityWarning import QualityWarning
class Test_groupProperties(unittest.TestCase):
def parse(self, data):
c = CSSQC({"groupProperties": "galjot"})
c.parse(data)
return c
def test_group_pr(self):
sample = '''div {
position: relative;
z-index: 6;
margin: 0;
padding: 0;
width: 100px;
height: 60px;
border: 0;
/* background & color */
background: #fff;
color: #333;
text-align: center
}
'''
c = self.parse(sample)
self.assertEqual(c.warnings, [
QualityWarning('groupProperties', 4),
QualityWarning('groupProperties', 14)
])
| matematik7/CSSQC | tests/test_groupProperties.py | Python | mit | 1,126 |
"""
===========================
make_imbalance function
===========================
An illustration of the make_imbalance function
"""
# Authors: Dayvid Oliveira
# Christos Aridas
# Guillaume Lemaitre <g.lemaitre58@gmail.com>
# License: MIT
from collections import Counter
import matplotlib.pyplot as plt
from sklearn.datasets import make_moons
from imblearn.datasets import make_imbalance
print(__doc__)
def plot_decoration(ax):
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.get_xaxis().tick_bottom()
ax.get_yaxis().tick_left()
ax.spines['left'].set_position(('outward', 10))
ax.spines['bottom'].set_position(('outward', 10))
ax.set_xlim([-4, 4])
# Generate the dataset
X, y = make_moons(n_samples=200, shuffle=True, noise=0.5, random_state=10)
# Two subplots, unpack the axes array immediately
f, axs = plt.subplots(2, 3)
axs = [a for ax in axs for a in ax]
axs[0].scatter(X[y == 0, 0], X[y == 0, 1], label="Class #0", alpha=0.5)
axs[0].scatter(X[y == 1, 0], X[y == 1, 1], label="Class #1", alpha=0.5)
axs[0].set_title('Original set')
plot_decoration(axs[0])
def ratio_func(y, multiplier, minority_class):
target_stats = Counter(y)
return {minority_class: int(multiplier * target_stats[minority_class])}
multipliers = [0.9, 0.75, 0.5, 0.25, 0.1]
for i, multiplier in enumerate(multipliers, start=1):
ax = axs[i]
X_, y_ = make_imbalance(X, y, ratio=ratio_func,
**{"multiplier": multiplier,
"minority_class": 1})
ax.scatter(X_[y_ == 0, 0], X_[y_ == 0, 1], label="Class #0", alpha=0.5)
ax.scatter(X_[y_ == 1, 0], X_[y_ == 1, 1], label="Class #1", alpha=0.5)
ax.set_title('ratio = {}'.format(multiplier))
plot_decoration(ax)
plt.tight_layout()
plt.show()
| glemaitre/UnbalancedDataset | examples/datasets/plot_make_imbalance.py | Python | mit | 1,841 |
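To make the effect of `ratio_func` concrete: with 100 samples per class and `multiplier=0.25`, the dict it returns asks `make_imbalance` to keep only 25 samples of class 1. A quick check using nothing but the standard library:

```python
from collections import Counter

y_example = [0] * 100 + [1] * 100
target_stats = Counter(y_example)            # Counter({0: 100, 1: 100})
requested = {1: int(0.25 * target_stats[1])}
print(requested)                             # {1: 25} -> class 1 is downsampled to 25 samples
```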
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cint, flt, cstr
from frappe import msgprint, _
import frappe.defaults
from erpnext.accounts.general_ledger import make_gl_entries, delete_gl_entries, process_gl_map
from erpnext.stock.utils import get_incoming_rate
from erpnext.controllers.accounts_controller import AccountsController
class StockController(AccountsController):
def make_gl_entries(self, repost_future_gle=True):
if self.docstatus == 2:
delete_gl_entries(voucher_type=self.doctype, voucher_no=self.name)
if cint(frappe.defaults.get_global_default("auto_accounting_for_stock")):
warehouse_account = get_warehouse_account()
if self.docstatus==1:
gl_entries = self.get_gl_entries(warehouse_account)
make_gl_entries(gl_entries)
if repost_future_gle:
items, warehouses = self.get_items_and_warehouses()
update_gl_entries_after(self.posting_date, self.posting_time, warehouses, items,
warehouse_account)
def get_gl_entries(self, warehouse_account=None, default_expense_account=None,
default_cost_center=None):
if not warehouse_account:
warehouse_account = get_warehouse_account()
sle_map = self.get_stock_ledger_details()
voucher_details = self.get_voucher_details(default_expense_account, default_cost_center, sle_map)
gl_list = []
warehouse_with_no_account = []
for detail in voucher_details:
sle_list = sle_map.get(detail.name)
if sle_list:
for sle in sle_list:
if warehouse_account.get(sle.warehouse):
# from warehouse account
self.check_expense_account(detail)
gl_list.append(self.get_gl_dict({
"account": warehouse_account[sle.warehouse]["name"],
"against": detail.expense_account,
"cost_center": detail.cost_center,
"remarks": self.get("remarks") or "Accounting Entry for Stock",
"debit": flt(sle.stock_value_difference, 2),
}, warehouse_account[sle.warehouse]["account_currency"]))
# to target warehouse / expense account
gl_list.append(self.get_gl_dict({
"account": detail.expense_account,
"against": warehouse_account[sle.warehouse]["name"],
"cost_center": detail.cost_center,
"remarks": self.get("remarks") or "Accounting Entry for Stock",
"credit": flt(sle.stock_value_difference, 2),
}))
elif sle.warehouse not in warehouse_with_no_account:
warehouse_with_no_account.append(sle.warehouse)
if warehouse_with_no_account:
msgprint(_("No accounting entries for the following warehouses") + ": \n" +
"\n".join(warehouse_with_no_account))
return process_gl_map(gl_list)
def get_voucher_details(self, default_expense_account, default_cost_center, sle_map):
if self.doctype == "Stock Reconciliation":
return [frappe._dict({ "name": voucher_detail_no, "expense_account": default_expense_account,
"cost_center": default_cost_center }) for voucher_detail_no, sle in sle_map.items()]
else:
details = self.get("items")
if default_expense_account or default_cost_center:
for d in details:
if default_expense_account and not d.get("expense_account"):
d.expense_account = default_expense_account
if default_cost_center and not d.get("cost_center"):
d.cost_center = default_cost_center
return details
def get_items_and_warehouses(self):
items, warehouses = [], []
if hasattr(self, "items"):
item_doclist = self.get("items")
elif self.doctype == "Stock Reconciliation":
import json
item_doclist = []
data = json.loads(self.reconciliation_json)
for row in data[data.index(self.head_row)+1:]:
d = frappe._dict(zip(["item_code", "warehouse", "qty", "valuation_rate"], row))
item_doclist.append(d)
if item_doclist:
for d in item_doclist:
if d.item_code and d.item_code not in items:
items.append(d.item_code)
if d.get("warehouse") and d.warehouse not in warehouses:
warehouses.append(d.warehouse)
if self.doctype == "Stock Entry":
if d.get("s_warehouse") and d.s_warehouse not in warehouses:
warehouses.append(d.s_warehouse)
if d.get("t_warehouse") and d.t_warehouse not in warehouses:
warehouses.append(d.t_warehouse)
return items, warehouses
def get_stock_ledger_details(self):
stock_ledger = {}
for sle in frappe.db.sql("""select warehouse, stock_value_difference,
voucher_detail_no, item_code, posting_date, actual_qty
from `tabStock Ledger Entry` where voucher_type=%s and voucher_no=%s""",
(self.doctype, self.name), as_dict=True):
stock_ledger.setdefault(sle.voucher_detail_no, []).append(sle)
return stock_ledger
def make_adjustment_entry(self, expected_gle, voucher_obj):
from erpnext.accounts.utils import get_stock_and_account_difference
account_list = [d.account for d in expected_gle]
acc_diff = get_stock_and_account_difference(account_list, expected_gle[0].posting_date)
cost_center = self.get_company_default("cost_center")
stock_adjustment_account = self.get_company_default("stock_adjustment_account")
gl_entries = []
for account, diff in acc_diff.items():
if diff:
gl_entries.append([
# stock in hand account
voucher_obj.get_gl_dict({
"account": account,
"against": stock_adjustment_account,
"debit": diff,
"remarks": "Adjustment Accounting Entry for Stock",
}),
# account against stock in hand
voucher_obj.get_gl_dict({
"account": stock_adjustment_account,
"against": account,
"credit": diff,
"cost_center": cost_center or None,
"remarks": "Adjustment Accounting Entry for Stock",
}),
])
if gl_entries:
from erpnext.accounts.general_ledger import make_gl_entries
make_gl_entries(gl_entries)
def check_expense_account(self, item):
if not item.get("expense_account"):
frappe.throw(_("Expense or Difference account is mandatory for Item {0} as it impacts overall stock value").format(item.item_code))
else:
is_expense_account = frappe.db.get_value("Account",
item.get("expense_account"), "report_type")=="Profit and Loss"
if self.doctype not in ("Purchase Receipt", "Stock Reconciliation", "Stock Entry") and not is_expense_account:
frappe.throw(_("Expense / Difference account ({0}) must be a 'Profit or Loss' account")
.format(item.get("expense_account")))
if is_expense_account and not item.get("cost_center"):
frappe.throw(_("{0} {1}: Cost Center is mandatory for Item {2}").format(
_(self.doctype), self.name, item.get("item_code")))
def get_sl_entries(self, d, args):
sl_dict = frappe._dict({
"item_code": d.get("item_code", None),
"warehouse": d.get("warehouse", None),
"posting_date": self.posting_date,
"posting_time": self.posting_time,
"voucher_type": self.doctype,
"voucher_no": self.name,
"voucher_detail_no": d.name,
"actual_qty": (self.docstatus==1 and 1 or -1)*flt(d.get("stock_qty")),
"stock_uom": frappe.db.get_value("Item", args.get("item_code") or d.get("item_code"), "stock_uom"),
"incoming_rate": 0,
"company": self.company,
"fiscal_year": self.fiscal_year,
"batch_no": cstr(d.get("batch_no")).strip(),
"serial_no": d.get("serial_no"),
"project": d.get("project_name"),
"is_cancelled": self.docstatus==2 and "Yes" or "No"
})
sl_dict.update(args)
return sl_dict
def make_sl_entries(self, sl_entries, is_amended=None, allow_negative_stock=False,
via_landed_cost_voucher=False):
from erpnext.stock.stock_ledger import make_sl_entries
make_sl_entries(sl_entries, is_amended, allow_negative_stock, via_landed_cost_voucher)
def make_gl_entries_on_cancel(self):
if frappe.db.sql("""select name from `tabGL Entry` where voucher_type=%s
and voucher_no=%s""", (self.doctype, self.name)):
self.make_gl_entries()
def get_serialized_items(self):
serialized_items = []
item_codes = list(set([d.item_code for d in self.get("items")]))
if item_codes:
serialized_items = frappe.db.sql_list("""select name from `tabItem`
where has_serial_no=1 and name in ({})""".format(", ".join(["%s"]*len(item_codes))),
tuple(item_codes))
return serialized_items
def get_incoming_rate_for_sales_return(self, item_code, warehouse, against_document):
incoming_rate = 0.0
if against_document and item_code:
incoming_rate = frappe.db.sql("""select abs(stock_value_difference / actual_qty)
from `tabStock Ledger Entry`
where voucher_type = %s and voucher_no = %s
and item_code = %s and warehouse=%s limit 1""",
(self.doctype, against_document, item_code, warehouse))
incoming_rate = incoming_rate[0][0] if incoming_rate else 0.0
return incoming_rate
def update_reserved_qty(self):
so_map = {}
for d in self.get("items"):
if d.so_detail:
if self.doctype == "Delivery Note" and d.against_sales_order:
so_map.setdefault(d.against_sales_order, []).append(d.so_detail)
elif self.doctype == "Sales Invoice" and d.sales_order and self.update_stock:
so_map.setdefault(d.sales_order, []).append(d.so_detail)
for so, so_item_rows in so_map.items():
if so and so_item_rows:
sales_order = frappe.get_doc("Sales Order", so)
if sales_order.status in ["Stopped", "Cancelled"]:
frappe.throw(_("{0} {1} is cancelled or stopped").format(_("Sales Order"), so),
frappe.InvalidStatusError)
sales_order.update_reserved_qty(so_item_rows)
def update_stock_ledger(self):
self.update_reserved_qty()
sl_entries = []
for d in self.get_item_list():
if frappe.db.get_value("Item", d.item_code, "is_stock_item") == 1 and flt(d.qty):
return_rate = 0
if cint(self.is_return) and self.return_against and self.docstatus==1:
return_rate = self.get_incoming_rate_for_sales_return(d.item_code,
d.warehouse, self.return_against)
# On cancellation, or on submission of a return entry, make the stock ledger entry for
# the target warehouse first, to update serial no values properly
if d.warehouse and ((not cint(self.is_return) and self.docstatus==1)
or (cint(self.is_return) and self.docstatus==2)):
sl_entries.append(self.get_sl_entries(d, {
"actual_qty": -1*flt(d.qty),
"incoming_rate": return_rate
}))
if d.target_warehouse:
target_warehouse_sle = self.get_sl_entries(d, {
"actual_qty": flt(d.qty),
"warehouse": d.target_warehouse
})
if self.docstatus == 1:
if not cint(self.is_return):
args = frappe._dict({
"item_code": d.item_code,
"warehouse": d.warehouse,
"posting_date": self.posting_date,
"posting_time": self.posting_time,
"qty": -1*flt(d.qty),
"serial_no": d.serial_no
})
target_warehouse_sle.update({
"incoming_rate": get_incoming_rate(args)
})
else:
target_warehouse_sle.update({
"outgoing_rate": return_rate
})
sl_entries.append(target_warehouse_sle)
if d.warehouse and ((not cint(self.is_return) and self.docstatus==2)
or (cint(self.is_return) and self.docstatus==1)):
sl_entries.append(self.get_sl_entries(d, {
"actual_qty": -1*flt(d.qty),
"incoming_rate": return_rate
}))
self.make_sl_entries(sl_entries)
def validate_warehouse(self):
from erpnext.stock.utils import validate_warehouse_company
warehouses = list(set([d.warehouse for d in
self.get("items") if getattr(d, "warehouse", None)]))
for w in warehouses:
validate_warehouse_company(w, self.company)
def update_gl_entries_after(posting_date, posting_time, for_warehouses=None, for_items=None,
warehouse_account=None):
def _delete_gl_entries(voucher_type, voucher_no):
frappe.db.sql("""delete from `tabGL Entry`
where voucher_type=%s and voucher_no=%s""", (voucher_type, voucher_no))
if not warehouse_account:
warehouse_account = get_warehouse_account()
future_stock_vouchers = get_future_stock_vouchers(posting_date, posting_time, for_warehouses, for_items)
gle = get_voucherwise_gl_entries(future_stock_vouchers, posting_date)
for voucher_type, voucher_no in future_stock_vouchers:
existing_gle = gle.get((voucher_type, voucher_no), [])
voucher_obj = frappe.get_doc(voucher_type, voucher_no)
expected_gle = voucher_obj.get_gl_entries(warehouse_account)
if expected_gle:
if not existing_gle or not compare_existing_and_expected_gle(existing_gle,
expected_gle):
_delete_gl_entries(voucher_type, voucher_no)
voucher_obj.make_gl_entries(repost_future_gle=False)
else:
_delete_gl_entries(voucher_type, voucher_no)
def compare_existing_and_expected_gle(existing_gle, expected_gle):
matched = True
for entry in expected_gle:
for e in existing_gle:
if entry.account==e.account and entry.against_account==e.against_account \
and (not entry.cost_center or not e.cost_center or entry.cost_center==e.cost_center) \
and (entry.debit != e.debit or entry.credit != e.credit):
matched = False
break
return matched
def get_future_stock_vouchers(posting_date, posting_time, for_warehouses=None, for_items=None):
future_stock_vouchers = []
values = []
condition = ""
if for_items:
condition += " and item_code in ({})".format(", ".join(["%s"] * len(for_items)))
values += for_items
if for_warehouses:
condition += " and warehouse in ({})".format(", ".join(["%s"] * len(for_warehouses)))
values += for_warehouses
for d in frappe.db.sql("""select distinct sle.voucher_type, sle.voucher_no
from `tabStock Ledger Entry` sle
where timestamp(sle.posting_date, sle.posting_time) >= timestamp(%s, %s) {condition}
order by timestamp(sle.posting_date, sle.posting_time) asc, name asc""".format(condition=condition),
tuple([posting_date, posting_time] + values), as_dict=True):
future_stock_vouchers.append([d.voucher_type, d.voucher_no])
return future_stock_vouchers
def get_voucherwise_gl_entries(future_stock_vouchers, posting_date):
gl_entries = {}
if future_stock_vouchers:
for d in frappe.db.sql("""select * from `tabGL Entry`
where posting_date >= %s and voucher_no in (%s)""" %
('%s', ', '.join(['%s']*len(future_stock_vouchers))),
tuple([posting_date] + [d[1] for d in future_stock_vouchers]), as_dict=1):
gl_entries.setdefault((d.voucher_type, d.voucher_no), []).append(d)
return gl_entries
def get_warehouse_account():
warehouse_account = frappe._dict()
for d in frappe.db.sql("""select warehouse, name, account_currency from tabAccount
where account_type = 'Warehouse' and (warehouse is not null and warehouse != '')""", as_dict=1):
warehouse_account.setdefault(d.warehouse, d)
return warehouse_account
| MartinEnder/erpnext-de | erpnext/controllers/stock_controller.py | Python | agpl-3.0 | 14,728 |
#!/usr/bin/env python
import random
from src.core import setcore as core
try:
print ("\n [****] Custom Template Generator [****]\n")
author=raw_input(core.setprompt(["7"], "Name of the author"))
filename=randomgen=random.randrange(1,99999999999999999999)
filename=str(filename)+(".template")
origin=raw_input(core.setprompt(["7"], "Source phone # of the template"))
subject=raw_input(core.setprompt(["7"], "Subject of the template"))
body=raw_input(core.setprompt(["7"], "Body of the message"))
filewrite=file("src/templates/sms/%s" % (filename), "w")
filewrite.write("# Author: "+author+"\n#\n#\n#\n")
filewrite.write('ORIGIN='+'"'+origin+'"\n\n')
filewrite.write('SUBJECT='+'"'+subject+'"\n\n')
filewrite.write('BODY='+'"'+body+'"\n')
print "\n"
filewrite.close()
except Exception, e:
core.PrintError("An error occured:")
core.PrintError("ERROR:" + str(e))
| firebitsbr/pwn_plug_sources | src/set/src/sms/client/custom_sms_template.py | Python | gpl-3.0 | 886 |
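The generator above is Python 2 code (`raw_input`, the `file()` builtin, bare `print` statements, `except Exception, e`), so it fails under Python 3. A minimal sketch of the same flow ported to Python 3, keeping the `setcore` prompt helpers it already uses:

```python
#!/usr/bin/env python3
import random

from src.core import setcore as core

try:
    print("\n [****] Custom Template Generator [****]\n")
    author = input(core.setprompt(["7"], "Name of the author"))
    filename = "{}.template".format(random.randrange(1, 99999999999999999999))
    origin = input(core.setprompt(["7"], "Source phone # of the template"))
    subject = input(core.setprompt(["7"], "Subject of the template"))
    body = input(core.setprompt(["7"], "Body of the message"))
    # A with-block replaces the Python 2 file() builtin and closes the file for us.
    with open("src/templates/sms/{}".format(filename), "w") as template:
        template.write("# Author: " + author + "\n#\n#\n#\n")
        template.write('ORIGIN="' + origin + '"\n\n')
        template.write('SUBJECT="' + subject + '"\n\n')
        template.write('BODY="' + body + '"\n')
    print()
except Exception as e:
    core.PrintError("An error occurred:")
    core.PrintError("ERROR: " + str(e))
```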
#!/usr/bin/python
script = r"""
MD Dir1
MD Dir1\Dir2
MF Dir1\readme.txt
COPY Dir1 Dir1
"""
expected = r"""
C:
|_DIR1
|_DIR1
| |_DIR2
| |
| |_readme.txt
|
|_DIR2
|
|_readme.txt
"""
import test
test.run(script, expected)
| artemkin/sandbox | fme/tests/test_copy_to_itself.py | Python | bsd-2-clause | 262 |
import html
import os
from random import randint
import requests
from pdf417as_str import convert
from pdf417as_str import main
# supported symbols
symbols = ' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~'
test_data = [
(symbols, -1),
('ab12', 2),
('Transformation of each CW into a 3 characters string and addition of separators, start row character', -1),
('Transformation of each CW into a 3 characters string and addition of separators, start row character.', -1),
('.,', -1),
('https://wat.com/', -1),
('M1BYKOV/SERGEYAMR EZSTIVF SVXDMEU6 0262 304E000 0000 043>218 0000I 252625814571230', 6),
('M1EDEMSKIY/ANDREY EY5PRXC SVXDMEU6 0266 212Y021F0062 31C>2080 B0E 0 ', 6),
]
# random data
for i in range(100):
test_line = ''
# the decoder can't decode strings that are only 1 character long
part_cnt = randint(2, 10)
for part in range(part_cnt):
symbol = symbols[randint(0, len(symbols) - 1)]
test_line += symbol * part_cnt
test_data.append((test_line, randint(-1, 5)))
if __name__ == '__main__':
error_count = 0
res_path = 'barcodes'
if not os.path.exists(res_path):
os.mkdir(res_path)
for test_i, test_value in enumerate(test_data):
test_text = test_value[0]
# make text code
code = main.encode(test_value[0], test_value[1])
# make png
img_name = 'barcode' + str(test_i)
image = convert.to_png(code)
img_path = os.path.join(res_path, '{}.png'.format(img_name))
image.save(img_path, 'png')
# decode
files = {'file': open(img_path, 'rb')}
response = requests.post(url="http://zxing.org/w/decode", files=files)
if not response.text.find('Decode Succeeded') > -1:
print('decode failed for: {}'.format(test_text))
break
decoded_value_raw = response.text[response.text.find('<pre>') + 5: response.text.find('</pre>')]
decoded_value = html.unescape(decoded_value_raw)
if test_text == decoded_value:
print('success: {}'.format(test_text))
else:
error_count += 1
print('error:\n> 1. {0}\n> 2. {1}\n> 3. {2}'.format(test_text, decoded_value, decoded_value_raw))
print('done, error_count = {}'.format(error_count))
| ikvk/pdf417as_str | test/test.py | Python | lgpl-3.0 | 2,344 |
from django.contrib.auth.models import User
from django.db import models
from django.test import TestCase
from lingo.forms import LingoForm
from lingo.models import LabelCustomization
class TestModel(models.Model):
name = models.CharField(max_length=200)
rank = models.CharField(max_length=200)
serial_number = models.CharField(max_length=200)
class LingoTestCase(TestCase):
def setUp(self):
self.user = User()
self.user.username = 'tester'
self.user.email = 'tester@tester.com'
self.user.save()
lc = LabelCustomization()
lc.user = self.user
lc.app_label = 'lingo'
lc.model_name = 'testmodel'
lc.field = 'rank'
lc.custom_label = 'Job Level'
lc.save()
def test_customization(self):
class TestForm(LingoForm):
class Meta:
model = TestModel
uncustomized_form = TestForm()
self.assertEquals(uncustomized_form.fields['rank'].label, 'Rank')
customized_form = TestForm(user=self.user)
self.assertEquals(customized_form.fields['rank'].label, 'Job Level')
| ryates/django-lingo | lingo/tests.py | Python | bsd-3-clause | 1,163 |
from setuptools import setup, find_packages
setup(
name='django-test-html-form',
version='0.1',
description="Make your Django HTML form tests more explicit and concise.",
long_description=open('README.rst').read(),
keywords='django test assert',
author='Dan Claudiu Pop',
author_email='dancladiupop@gmail.com',
url='https://github.com/danclaudiupop/assertHtmlForm',
license='BSD License',
packages=find_packages(),
include_package_data=True,
install_requires=[
'beautifulsoup4',
],
)
| danclaudiupop/django-test-html-form | setup.py | Python | bsd-3-clause | 546 |
#!/usr/bin/env python
"""Test file names for tvnamer
"""
import datetime
files = {}
files['default_format'] = [
{'input': 'Scrubs - [04x19] - My Best Laid Plans',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 4, 'episodenumbers': [19],
'episodenames': ['My Best Laid Plans']},
{'input': 'Scrubs - [02x11]',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 2, 'episodenumbers': [11],
'episodenames': ['My Sex Buddy']},
{'input': 'Scrubs - [04X19] - My Best Laid Plans',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 4, 'episodenumbers': [19],
'episodenames': ['My Best Laid Plans']},
]
files['s01e01_format'] = [
{'input': 'scrubs.s01e01',
'parsedseriesname': 'scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [1],
'episodenames': ['My First Day']},
{'input': 'my.name.is.earl.s01e01',
'parsedseriesname': 'my name is earl',
'correctedseriesname': 'My Name Is Earl',
'seasonnumber': 1, 'episodenumbers': [1],
'episodenames': ['Pilot']},
{'input': 'scrubs.s01e24.blah.fake',
'parsedseriesname': 'scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [24],
'episodenames': ['My Last Day']},
{'input': 'dexter.s04e05.720p.blah',
'parsedseriesname': 'dexter',
'correctedseriesname': 'Dexter',
'seasonnumber': 4, 'episodenumbers': [5],
'episodenames': ['Dirty Harry']},
{'input': 'QI.S04E01.2006-09-29.blah',
'parsedseriesname': 'QI',
'correctedseriesname': 'QI',
'seasonnumber': 4, 'episodenumbers': [1],
'episodenames': ['Danger']},
{'input': 'The Wire s05e10 30.mp4',
'parsedseriesname': 'The Wire',
'correctedseriesname': 'The Wire',
'seasonnumber': 5, 'episodenumbers': [10],
'episodenames': ['-30-']},
{'input': 'Arrested Development - S2 E 02 - Dummy Ep Name.blah',
'parsedseriesname': 'Arrested Development',
'correctedseriesname': 'Arrested Development',
'seasonnumber': 2, 'episodenumbers': [2],
'episodenames': ['The One Where They Build a House']},
{'input': 'Horizon - s2008e02 - Total Isolation.avi',
'parsedseriesname': 'Horizon',
'correctedseriesname': 'Horizon',
'seasonnumber': 2008, 'episodenumbers': [2],
'episodenames': ['Total Isolation']},
{'input': 'Horizon.s2008e02.Total Isolation.avi',
'parsedseriesname': 'Horizon',
'correctedseriesname': 'Horizon',
'seasonnumber': 2008, 'episodenumbers': [2],
'episodenames': ['Total Isolation']},
{'input': 'Horizon - [2008x03] - Total Isolation.avi',
'parsedseriesname': 'Horizon',
'correctedseriesname': 'Horizon',
'seasonnumber': 2008, 'episodenumbers': [3],
'episodenames': ['What on Earth is Wrong With Gravity?']},
{'input': 'Scrubs.0101.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [1],
'episodenames': ['My First Day']},
{'input': 'Scrubs 1x01-720p.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [1],
'episodenames': ['My First Day']},
{'input': 'Scrubs - [s01e01].avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [1],
'episodenames': ['My First Day']},
{'input': 'Scrubs - [01.01].avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [1],
'episodenames': ['My First Day']},
{'input': '30 Rock [2.10] Episode 210.avi',
'parsedseriesname': '30 Rock',
'correctedseriesname': '30 Rock',
'seasonnumber': 2, 'episodenumbers': [10],
'episodenames': ['Episode 210']},
{'input': 'scrubs.s01_e01.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [1],
'episodenames': ['My First Day']},
{'input': 'scrubs - s01 - e02 - something.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [2],
'episodenames': ['My Mentor']},
]
files['misc'] = [
{'input': 'Six.Feet.Under.S0201.test_testing-yay',
'parsedseriesname': 'Six Feet Under',
'correctedseriesname': 'Six Feet Under',
'seasonnumber': 2, 'episodenumbers': [1],
'episodenames': ['In the Game']},
{'input': 'Sid.The.Science.Kid.E11.The.Itchy.Tag.WS.ABC.DeF-HIJK',
'parsedseriesname': 'Sid The Science Kid',
'correctedseriesname': 'Sid the Science Kid',
'seasonnumber': None, 'episodenumbers': [11],
'episodenames': ['The Itchy Tag']},
{'input': 'Total Access 247 - [01x01]',
'parsedseriesname': 'total access 247',
'correctedseriesname': 'Total Access 24/7',
'seasonnumber': 1, 'episodenumbers': [1],
'episodenames': ['Episode #1']},
{'input': 'Neighbours - Episode 5824 [S 6 - Ep 003] - Fri 15 Jan 2010 [KCRT].avi',
'parsedseriesname': 'Neighbours',
'correctedseriesname': 'Neighbours',
'seasonnumber': 6, 'episodenumbers': [3],
'episodenames': ['Episode 1350']},
{'input': 'Scrubs Season 01 Episode 01 - The Series Title.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [1],
'episodenames': ['My First Day']},
]
files['multiple_episodes'] = [
{'input': 'Scrubs - [01x01-02-03]',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [1, 2, 3],
'episodenames': ['My First Day', 'My Mentor', 'My Best Friend\'s Mistake']},
{'input': 'scrubs.s01e23e24',
'parsedseriesname': 'scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [23, 24],
'episodenames': ['My Hero', 'My Last Day']},
{'input': 'scrubs.01x23x24',
'parsedseriesname': 'scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [23, 24],
'episodenames': ['My Hero', 'My Last Day']},
{'input': 'scrubs.01x23-24',
'parsedseriesname': 'scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [23, 24],
'episodenames': ['My Hero', 'My Last Day']},
{'input': 'Stargate SG-1 - [01x01-02]',
'parsedseriesname': 'Stargate SG-1',
'correctedseriesname': 'Stargate SG-1',
'seasonnumber': 1, 'episodenumbers': [1, 2],
'episodenames': ['Children of the Gods (1)', 'Children of the Gods (2)']},
{'input': '[Lunar] Bleach - 11-12 [B937F496]',
'parsedseriesname': 'Bleach',
'correctedseriesname': 'Bleach',
'seasonnumber': None, 'episodenumbers': [11, 12],
'episodenames': ['The Legendary Quincy', 'A Gentle Right Arm']},
{'input': 'scrubs.s01e01e02e03',
'parsedseriesname': 'scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [1, 2, 3],
'episodenames': ['My First Day', 'My Mentor', 'My Best Friend\'s Mistake']},
{'input': 'Scrubs - [02x01-03]',
'parsedseriesname': 'scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 2, 'episodenumbers': [1, 2, 3],
'episodenames': ['My Overkill', 'My Nightingale', 'My Case Study']},
{'input': 'Scrubs - [02x01+02]',
'parsedseriesname': 'scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 2, 'episodenumbers': [1, 2],
'episodenames': ['My Overkill', 'My Nightingale']},
{'input': 'Scrubs 2x01+02',
'parsedseriesname': 'scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 2, 'episodenumbers': [1, 2],
'episodenames': ['My Overkill', 'My Nightingale']},
{'input': 'Flight.of.the.Conchords.S01E01-02.An.Ep.name.avi',
'parsedseriesname': 'Flight of the Conchords',
'correctedseriesname': 'Flight of the Conchords',
'seasonnumber': 1, 'episodenumbers': [1, 2],
'episodenames': ['Sally', 'Bret Gives Up the Dream']},
{'input': 'Flight.of.the.Conchords.S01E02e01.An.Ep.name.avi',
'parsedseriesname': 'Flight of the Conchords',
'correctedseriesname': 'Flight of the Conchords',
'seasonnumber': 1, 'episodenumbers': [1, 2],
'episodenames': ['Sally', 'Bret Gives Up the Dream']},
{'input': 'Scrubs s01e22 s01e23 s01e24.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [22, 23, 24],
'episodenames': ['My Occurrence', 'My Hero', 'My Last Day']},
{'input': 'Scrubs s01e22 s01e23.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [22, 23],
'episodenames': ['My Occurrence', 'My Hero']},
{'input': 'Scrubs - 01x22 01x23.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [22, 23],
'episodenames': ['My Occurrence', 'My Hero']},
{'input': 'Scrubs.01x22.01x23.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [22, 23],
'episodenames': ['My Occurrence', 'My Hero']},
{'input': 'Scrubs 1x22 1x23.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [22, 23],
'episodenames': ['My Occurrence', 'My Hero']},
{'input': 'Scrubs.S01E01-E04.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [1, 2, 3, 4],
'episodenames': ['My First Day', 'My Mentor', 'My Best Friend\'s Mistake', 'My Old Lady']},
]
files['unicode'] = [
{'input': u'Carniv\xe0le 1x11 - The Day of the Dead',
'parsedseriesname': u'Carniv\xe0le',
'correctedseriesname': u'Carniv\xe0le',
'seasonnumber': 1, 'episodenumbers': [11],
'episodenames': ['The Day of the Dead']},
{'input': u'T\xecnh Ng\u01b0\u1eddi Hi\u1ec7n \u0110\u1ea1i - [01x01]',
'parsedseriesname': u'T\xecnh Ng\u01b0\u1eddi Hi\u1ec7n \u0110\u1ea1i',
'correctedseriesname': u'Virtues Of Harmony II',
'seasonnumber': 1, 'episodenumbers': [1],
'episodenames': [u'T\xecnh Ng\u01b0\u1eddi Hi\u1ec7n \u0110\u1ea1i - Virtues Of Harmony II']},
{'input': u'The Big Bang Theory - S02E07 - The Panty Pi\xf1ata Polarization.avi',
'parsedseriesname': u'The Big Bang Theory',
'correctedseriesname': u'The Big Bang Theory',
'seasonnumber': 2, 'episodenumbers': [7],
'episodenames': [u'The Panty Pi\xf1ata Polarization']},
{'input': u'NCIS - 1x16.avi',
'parsedseriesname': u'NCIS',
'correctedseriesname': u'NCIS',
'seasonnumber': 1, 'episodenumbers': [16],
'episodenames': [u'B\xeate Noire']},
]
files['anime'] = [
{'input': '[Eclipse] Fullmetal Alchemist Brotherhood - 02 (1280x720 h264) [8452C4BF].mkv',
'parsedseriesname': 'Fullmetal Alchemist Brotherhood',
'correctedseriesname': 'Fullmetal Alchemist: Brotherhood',
'seasonnumber': None, 'episodenumbers': [2],
'episodenames': ['The First Day']},
{'input': '[Shinsen-Subs] Armored Trooper Votoms - 01 [9E3F1D1C].mkv',
'parsedseriesname': 'armored trooper votoms',
'correctedseriesname': 'Armored Trooper VOTOMS',
'seasonnumber': None, 'episodenumbers': [1],
'episodenames': ['War\'s End']},
{'input': '[Shinsen-Subs] Beet - 19 [24DAB497].mkv',
'parsedseriesname': 'beet',
'correctedseriesname': 'Beet the Vandel Buster',
'seasonnumber': None, 'episodenumbers': [19],
'episodenames': ['Threat of the Planet Earth']},
{'input': '[AG-SHS]Victory_Gundam-03_DVD[FC6E3A6F].mkv',
'parsedseriesname': 'victory gundam',
'correctedseriesname': 'Mobile Suit Victory Gundam',
'seasonnumber': None, 'episodenumbers': [3],
'episodenames': ['Uso\'s Fight']},
{'input': '[YuS-SHS]Gintama-24(H264)_[52CA4F8B].mkv',
'parsedseriesname': 'gintama',
'correctedseriesname': 'Gintama',
'seasonnumber': None, 'episodenumbers': [24],
'episodenames': ['Cute Faces Are Always Hiding Something']},
{'input': '[Shinsen-Subs] True Mazinger - 07 [848x480 H.264 Vorbis][787D0074].mkv',
'parsedseriesname': 'True Mazinger',
'correctedseriesname': 'True Mazinger: Shocking! Z Chapter',
'seasonnumber': None, 'episodenumbers': [7],
'episodenames': ['Legend! The Mechanical Beasts of Bardos!']},
{'input': '[BSS]_Tokyo_Magnitude_8.0_-_02_[0E5C4A40].mkv',
'parsedseriesname': 'tokyo magnitude 8.0',
'correctedseriesname': 'Tokyo Magnitude 8.0',
'seasonnumber': None, 'episodenumbers': [2],
'episodenames': ['Broken World']},
{'input': 'Bleach - [310] - Ichigo\'s Resolution.avi',
'parsedseriesname': 'Bleach',
'correctedseriesname': 'Bleach',
'seasonnumber': None, 'episodenumbers': [310],
'episodenames': ['Ichigo\'s Resolution']},
]
files['date_based'] = [
{'input': 'Scrubs.2001-10-02.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'episodenumbers': [datetime.date(2001, 10, 2)],
'episodenames': ['My First Day']},
{'input': 'Scrubs - 2001-10-02 - Old Episode Title.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'episodenumbers': [datetime.date(2001, 10, 2)],
'episodenames': ['My First Day']},
{'input': 'Scrubs - 2001.10.02 - Old Episode Title.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'episodenumbers': [datetime.date(2001, 10, 2)],
'episodenames': ['My First Day']},
{'input': 'yes.we.canberra.2010.08.18.pdtv.xvid',
'parsedseriesname': 'yes we canberra',
'correctedseriesname': 'Yes We Canberra',
'episodenumbers': [datetime.date(2010, 8, 18)],
'episodenames': ['Episode 4']},
]
files['x_of_x'] = [
{'input': 'Scrubs.1of5.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': None, 'episodenumbers': [1],
'episodenames': ['My First Day']},
{'input': 'Scrubs part 1.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': None, 'episodenumbers': [1],
'episodenames': ['My First Day']},
{'input': 'Scrubs part 1 of 10.avi', # only one episode, as it's not "1 to 10"
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': None, 'episodenumbers': [1],
'episodenames': ['My First Day']},
{'input': 'Scrubs part 1 and part 2.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': None, 'episodenumbers': [1, 2],
'episodenames': ['My First Day', 'My Mentor']},
{'input': 'Scrubs part 1 to part 3.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': None, 'episodenumbers': [1, 2, 3],
'episodenames': ['My First Day', 'My Mentor', 'My Best Friend\'s Mistake']},
{'input': 'Scrubs part 1 to 4.avi',
'parsedseriesname': 'Scrubs',
'correctedseriesname': 'Scrubs',
'seasonnumber': None, 'episodenumbers': [1, 2, 3, 4],
'episodenames': ['My First Day', 'My Mentor', 'My Best Friend\'s Mistake', 'My Old Lady']},
]
files['no_series_name'] = [
{'input': 's01e01.avi',
'force_name': 'Scrubs',
'parsedseriesname': None,
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [1],
'episodenames': ['My First Day']},
{'input': '[01x01].avi',
'force_name': 'Scrubs',
'parsedseriesname': None,
'correctedseriesname': 'Scrubs',
'seasonnumber': 1, 'episodenumbers': [1],
'episodenames': ['My First Day']},
]
def test_verify_test_data_sanity():
"""Checks all test data is consistent.
Keys within each test category must be consistent, but keys can vary
category to category. E.g date-based episodes do not have a season number
"""
from helpers import assertEquals
for test_category, testcases in files.items():
keys = [ctest.keys() for ctest in testcases]
for k1 in keys:
for k2 in keys:
assertEquals(sorted(k1), sorted(k2))
| lahwaacz/tvnamer | tests/test_files.py | Python | unlicense | 16,174 |
import os
from genomepy.plugins import Plugin
from genomepy.utils import cmd_ok, mkdir_p, rm_rf, run_index_cmd
class Minimap2Plugin(Plugin):
def after_genome_download(self, genome, threads=1, force=False):
if not cmd_ok("minimap2"):
return
# Create index dir
index_dir = genome.plugin["minimap2"]["index_dir"]
index_name = genome.plugin["minimap2"]["index_name"]
if force:
# Start from scratch
rm_rf(index_dir)
mkdir_p(index_dir)
if not any(fname.endswith(".mmi") for fname in os.listdir(index_dir)):
# Create index
cmd = f"minimap2 -t {threads} -d {index_name} {genome.filename}"
run_index_cmd("minimap2", cmd)
def get_properties(self, genome):
props = {
"index_dir": os.path.join(
os.path.dirname(genome.filename), "index", "minimap2"
),
"index_name": os.path.join(
os.path.dirname(genome.filename),
"index",
"minimap2",
f"{genome.name}.mmi",
),
}
return props
| simonvh/genomepy | genomepy/plugins/minimap2.py | Python | mit | 1,157 |
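The plugin above pre-builds a `.mmi` index with `minimap2 -d` so that later alignment jobs can skip indexing. A sketch of how that index is typically consumed afterwards (the paths are illustrative; in genomepy they would come from `genome.plugin["minimap2"]["index_name"]`):

```python
import subprocess

# Illustrative paths -- substitute the index path reported by the plugin.
index_mmi = "/data/genomes/hg38/index/minimap2/hg38.mmi"
reads_fq = "/data/reads/sample.fastq.gz"

# -a asks minimap2 for SAM output; -t sets the thread count.
with open("sample.sam", "w") as sam:
    subprocess.run(["minimap2", "-t", "4", "-a", index_mmi, reads_fq],
                   stdout=sam, check=True)
```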
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: conditional_operation_sample.py
DESCRIPTION:
This sample demos conditional set/get/delete operations for app configuration
USAGE: python conditional_operation_sample.py
"""
from azure.core import MatchConditions
from azure.core.exceptions import ResourceModifiedError
from azure.appconfiguration import AzureAppConfigurationClient, ConfigurationSetting
from util import print_configuration_setting, get_connection_string
def main():
CONNECTION_STRING = get_connection_string()
# Create app config client
client = AzureAppConfigurationClient.from_connection_string(CONNECTION_STRING)
# Unconditional set
config_setting = ConfigurationSetting(
key="MyKey",
value="my value",
content_type="my content type",
tags={"my tag": "my tag value"}
)
client.set_configuration_setting(config_setting)
# Unconditional get
first_get = client.get_configuration_setting(key="MyKey")
print_configuration_setting(first_get)
# Conditional get, expect to return None because it is not modified
second_get = client.get_configuration_setting(
key="MyKey",
etag=first_get.etag,
match_condition=MatchConditions.IfModified
)
print_configuration_setting(second_get)
# Conditional set
first_get.value = "new value"
client.set_configuration_setting(
configuration_setting=first_get,
match_condition=MatchConditions.IfNotModified
)
# Conditional set, expect to see error because it is modified
try:
client.set_configuration_setting(
configuration_setting=first_get,
match_condition=MatchConditions.IfNotModified
)
except ResourceModifiedError:
pass
client.delete_configuration_setting(
key="MyKey"
)
if __name__ == "__main__":
main()
| Azure/azure-sdk-for-python | sdk/appconfiguration/azure-appconfiguration/samples/conditional_operation_sample.py | Python | mit | 2,180 |
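When the guarded `set_configuration_setting` call raises `ResourceModifiedError`, the usual recovery is to re-read the setting (picking up the fresh etag), reapply the change, and retry. A small sketch built only from the client methods and exception type already imported in the sample above:

```python
def set_with_retry(client, setting, max_attempts=3):
    """Retry an etag-guarded write, refreshing the setting whenever someone else changed it."""
    for _ in range(max_attempts):
        try:
            return client.set_configuration_setting(
                configuration_setting=setting,
                match_condition=MatchConditions.IfNotModified,
            )
        except ResourceModifiedError:
            # The key changed underneath us: fetch the current revision and reapply our value.
            current = client.get_configuration_setting(key=setting.key, label=setting.label)
            current.value = setting.value
            setting = current
    raise RuntimeError("Gave up after {} conflicting writes".format(max_attempts))
```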
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
aliases = [
"number_of_young_households=gridcell.aggregate(urbansim.household.is_young)",
"number_of_home_owners=gridcell.aggregate(urbansim.household.is_home_owner)",
"number_of_home_renters=gridcell.aggregate(urbansim.household.is_home_renter)",
"number_of_households_with_children=gridcell.aggregate(urbansim.household.has_children)",
] | christianurich/VIBe2UrbanSim | 3rdparty/opus/src/urbansim/gridcell/aliases.py | Python | gpl-2.0 | 524 |
"""Simple example showing several generations of spans in a trace.
"""
import argparse
import sys
import time
import traceback
import opentracing
import xray_ot.tracer
def sleep_dot():
"""Short sleep and writes a dot to the STDOUT.
"""
time.sleep(0.05)
sys.stdout.write('.')
sys.stdout.flush()
def add_spans():
"""Calls the opentracing API, doesn't use any X-Ray-specific code.
"""
with opentracing.tracer.start_span(operation_name='trivial/initial_request') as parent_span:
parent_span.set_tag('url', 'localhost')
sleep_dot()
parent_span.log_event('All good here!', payload={'N': 42, 'pi': 3.14, 'abc': 'xyz'})
parent_span.log_kv({'foo': 'bar', 'int': 42, 'float': 4.2, 'bool': True, 'obj': {'blargh': 'hmm', 'whee': 4324}})
parent_span.set_tag('span_type', 'parent')
parent_span.set_tag('int_tag', 5)
parent_span.set_tag('unicode_val', u'non-ascii: \u200b')
parent_span.set_tag('bool_tag', True)
parent_span.set_baggage_item('checked', 'baggage')
sleep_dot()
# This is how you would represent starting work locally.
with opentracing.start_child_span(parent_span, operation_name='trivial/child_request') as child_span:
child_span.set_tag('span_type', 'child')
# Pretend there was an error
child_span.set_tag('error', True)
child_span.log_event('Uh Oh!', payload={'stacktrace': [tuple(f) for
f in traceback.extract_stack()]})
sleep_dot()
# Play with the propagation APIs... this is not IPC and thus not
# where they're intended to be used.
text_carrier = {}
opentracing.tracer.inject(child_span.context, opentracing.Format.TEXT_MAP, text_carrier)
span_context = opentracing.tracer.extract(opentracing.Format.TEXT_MAP, text_carrier)
with opentracing.tracer.start_span(
'trivial/remote_span',
child_of=span_context) as remote_span:
remote_span.log_event('Remote!')
remote_span.set_tag('span_type', 'remote')
sleep_dot()
def xray_tracer_from_args():
"""Initializes X-Ray from the commandline args.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--host', help='The host of the X-Ray daemon to contact.',
default='127.0.0.1')
parser.add_argument('--port', help='The X-Ray daemon port.',
type=int, default=2000)
parser.add_argument('--component_name', help='The component name',
default='TrivialExample')
args = parser.parse_args()
return xray_ot.Tracer(
component_name=args.component_name,
collector_host=args.host,
collector_port=args.port,
verbosity=1)
if __name__ == '__main__':
print('Hello ')
# Use the X-Ray opentracing implementation
with xray_tracer_from_args() as tracer:
opentracing.tracer = tracer
add_spans()
print(' World!')
| nornagon/xray-python-opentracing | examples/trivial/main.py | Python | mit | 3,112 |
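The example points out that `inject`/`extract` are really meant for crossing process boundaries rather than in-process use. A sketch of that intended pattern over HTTP, reusing the same `Format.TEXT_MAP` calls shown above: the caller injects the active span context into outgoing headers, and the callee extracts it to parent its own span (the `requests`-style session is an assumption):

```python
import opentracing

def call_downstream(session, url):
    """Client side: carry the current trace across the wire in request headers."""
    with opentracing.tracer.start_span('client/request') as span:
        headers = {}
        opentracing.tracer.inject(span.context, opentracing.Format.TEXT_MAP, headers)
        return session.get(url, headers=headers)

def handle_request(incoming_headers):
    """Server side: continue the trace that the client started."""
    parent_ctx = opentracing.tracer.extract(opentracing.Format.TEXT_MAP, dict(incoming_headers))
    with opentracing.tracer.start_span('server/handle', child_of=parent_ctx) as span:
        span.set_tag('span_type', 'server')
        # ... real request handling would go here ...
```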
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller, NoPolling
from msrestazure.polling.arm_polling import ARMPolling
from .. import models
class ConnectionMonitorsOperations(object):
"""ConnectionMonitorsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: Client API version. Constant value: "2018-01-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2018-01-01"
self.config = config
def _create_or_update_initial(
self, resource_group_name, network_watcher_name, connection_monitor_name, parameters, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.create_or_update.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'ConnectionMonitor')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200, 201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ConnectionMonitorResult', response)
if response.status_code == 201:
deserialized = self._deserialize('ConnectionMonitorResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create_or_update(
self, resource_group_name, network_watcher_name, connection_monitor_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
"""Create or update a connection monitor.
:param resource_group_name: The name of the resource group containing
Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:param parameters: Parameters that define the operation to create a
connection monitor.
:type parameters:
~azure.mgmt.network.v2018_01_01.models.ConnectionMonitor
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns ConnectionMonitorResult
or ClientRawResponse<ConnectionMonitorResult> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2018_01_01.models.ConnectionMonitorResult]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.network.v2018_01_01.models.ConnectionMonitorResult]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('ConnectionMonitorResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'}
def get(
self, resource_group_name, network_watcher_name, connection_monitor_name, custom_headers=None, raw=False, **operation_config):
"""Gets a connection monitor by name.
:param resource_group_name: The name of the resource group containing
Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: ConnectionMonitorResult or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.network.v2018_01_01.models.ConnectionMonitorResult
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ConnectionMonitorResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'}
def _delete_initial(
self, resource_group_name, network_watcher_name, connection_monitor_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.delete.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def delete(
self, resource_group_name, network_watcher_name, connection_monitor_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""Deletes the specified connection monitor.
:param resource_group_name: The name of the resource group containing
Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'}
def _stop_initial(
self, resource_group_name, network_watcher_name, connection_monitor_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.stop.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def stop(
self, resource_group_name, network_watcher_name, connection_monitor_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""Stops the specified connection monitor.
:param resource_group_name: The name of the resource group containing
Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._stop_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/stop'}
def _start_initial(
self, resource_group_name, network_watcher_name, connection_monitor_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.start.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def start(
self, resource_group_name, network_watcher_name, connection_monitor_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""Starts the specified connection monitor.
:param resource_group_name: The name of the resource group containing
Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._start_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/start'}
def _query_initial(
self, resource_group_name, network_watcher_name, connection_monitor_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.query.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ConnectionMonitorQueryResult', response)
if response.status_code == 202:
deserialized = self._deserialize('ConnectionMonitorQueryResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def query(
self, resource_group_name, network_watcher_name, connection_monitor_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""Query a snapshot of the most recent connection states.
:param resource_group_name: The name of the resource group containing
Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name given to the connection
monitor.
:type connection_monitor_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns
ConnectionMonitorQueryResult or
ClientRawResponse<ConnectionMonitorQueryResult> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2018_01_01.models.ConnectionMonitorQueryResult]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.network.v2018_01_01.models.ConnectionMonitorQueryResult]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._query_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('ConnectionMonitorQueryResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
query.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/query'}
def list(
self, resource_group_name, network_watcher_name, custom_headers=None, raw=False, **operation_config):
"""Lists all connection monitors for the specified Network Watcher.
:param resource_group_name: The name of the resource group containing
Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of ConnectionMonitorResult
:rtype:
~azure.mgmt.network.v2018_01_01.models.ConnectionMonitorResultPaged[~azure.mgmt.network.v2018_01_01.models.ConnectionMonitorResult]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.ConnectionMonitorResultPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.ConnectionMonitorResultPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors'}
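    # -----------------------------------------------------------------------
    # Editor's note -- a usage sketch, not part of the generated SDK code.
    # The long-running operations above return LROPoller objects. Assuming a
    # NetworkManagementClient that exposes this operations group as
    # ``client.connection_monitors`` (resource and variable names below are
    # illustrative only):
    #
    #     poller = client.connection_monitors.create_or_update(
    #         'my-rg', 'my-network-watcher', 'my-monitor', parameters)
    #     monitor = poller.result()   # blocks until the ARM operation is done
    #     for item in client.connection_monitors.list('my-rg',
    #                                                 'my-network-watcher'):
    #         print(item.name)
    # -----------------------------------------------------------------------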
| lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2018_01_01/operations/connection_monitors_operations.py | Python | mit | 32,322 |
from Pipe import Pipe
from PeriodicEnergy import PeriodicEnergy
from StaticImage import StaticImage
from WeeklyExtrema import WeeklyExtrema
from IndividualContest import IndividualContest
| interactiveinstitute/watthappened | drivers/__init__.py | Python | mit | 188 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'MailQueueItem'
db.create_table(u'mailq', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('messageid', self.gf('django.db.models.fields.CharField')(max_length=255)),
('timestamp', self.gf('django.db.models.fields.DateTimeField')()),
('from_address', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=255, blank=True)),
('to_address', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
('subject', self.gf('django.db.models.fields.TextField')(blank=True)),
('hostname', self.gf('django.db.models.fields.TextField')()),
('size', self.gf('django.db.models.fields.IntegerField')()),
('attempts', self.gf('django.db.models.fields.IntegerField')()),
('lastattempt', self.gf('django.db.models.fields.DateTimeField')()),
('direction', self.gf('django.db.models.fields.IntegerField')(default=1)),
('reason', self.gf('django.db.models.fields.TextField')(blank=True)),
('flag', self.gf('django.db.models.fields.IntegerField')(default=0)),
))
db.send_create_signal('status', ['MailQueueItem'])
def backwards(self, orm):
# Deleting model 'MailQueueItem'
db.delete_table(u'mailq')
models = {
'status.mailqueueitem': {
'Meta': {'ordering': "['-timestamp']", 'object_name': 'MailQueueItem', 'db_table': "u'mailq'"},
'attempts': ('django.db.models.fields.IntegerField', [], {}),
'direction': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'flag': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'from_address': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'hostname': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lastattempt': ('django.db.models.fields.DateTimeField', [], {}),
'messageid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'reason': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'size': ('django.db.models.fields.IntegerField', [], {}),
'subject': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {}),
'to_address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
}
}
complete_apps = ['status']
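    # Editor's note: with South, this initial migration is applied with
    # ``python manage.py migrate status`` (``status`` being the app this
    # migration belongs to); ``backwards`` drops the ``mailq`` table again.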
| liveaverage/baruwa | src/baruwa/status/migrations/0001_initial.py | Python | gpl-2.0 | 2,911 |
#!/usr/bin/env python
import json
import sys
metrics = {}
dimensions = {
"dim1": "val1"
}
metrics['first'] = {
"name": "example",
"value": 2.0,
"dimensions": dimensions,
"metricType": "gauge"
}
metrics['second'] = {
"name": "anotherExample",
"value": 2.0,
"dimensions": dimensions,
"metricType": "cumcounter"
}
# Send one metric
print json.dumps(metrics['first'])
# Send them all
print json.dumps(metrics.values())
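# Editor's note: this script is intended to be executed by fullerite's adhoc
# collector, which -- as the example itself suggests -- reads JSON from stdout,
# accepting either a single metric object or a list of metrics per line.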
| tsheasha/fullerite | examples/adhoc/example.py | Python | apache-2.0 | 457 |
"""
================================
Temporal whitening with AR model
================================
Here we fit an AR model to the data and use it
to temporally whiten the signals.
"""
# Authors: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
#
# License: BSD (3-clause)
import numpy as np
from scipy import signal
import matplotlib.pyplot as plt
import mne
from mne.time_frequency import fit_iir_model_raw
from mne.datasets import sample
print(__doc__)
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_raw.fif'
proj_fname = data_path + '/MEG/sample/sample_audvis_ecg-proj.fif'
raw = mne.io.read_raw_fif(raw_fname)
proj = mne.read_proj(proj_fname)
raw.info['projs'] += proj
raw.info['bads'] = ['MEG 2443', 'EEG 053'] # mark bad channels
# Set up pick list: Gradiometers - bad channels
picks = mne.pick_types(raw.info, meg='grad', exclude='bads')
order = 5 # define model order
picks = picks[:1]
# Estimate AR models on raw data
b, a = fit_iir_model_raw(raw, order=order, picks=picks, tmin=60, tmax=180)
d, times = raw[0, 10000:20000] # look at one channel from now on
d = d.ravel() # make flat vector
innovation = signal.convolve(d, a, 'valid')
d_ = signal.lfilter(b, a, innovation) # regenerate the signal
d_ = np.r_[d_[0] * np.ones(order), d_] # dummy samples to keep signal length
###############################################################################
# Plot the different time series and PSDs
plt.close('all')
plt.figure()
plt.plot(d[:100], label='signal')
plt.plot(d_[:100], label='regenerated signal')
plt.legend()
plt.figure()
plt.psd(d, Fs=raw.info['sfreq'], NFFT=2048)
plt.psd(innovation, Fs=raw.info['sfreq'], NFFT=2048)
plt.psd(d_, Fs=raw.info['sfreq'], NFFT=2048, linestyle='--')
plt.legend(('Signal', 'Innovation', 'Regenerated signal'))
plt.show()
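# Editor's note: if the AR model fits well, the innovation (whitened) trace
# should show an approximately flat PSD, while the regenerated signal's PSD
# should closely overlay that of the original signal.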
| teonlamont/mne-python | examples/time_frequency/plot_temporal_whitening.py | Python | bsd-3-clause | 1,840 |
import luigi
import sciluigi
import ssl
import cv2
import six.moves.urllib.request
import urllib
import pyannote.video
import pyannote.video.structure
import pyannote.video.face.face
import pyannote.video.face.tracking
import pyannote.video.face.clustering
from pyannote.core import Timeline
import pyannote.core.json
import pyannote_workflows.tasks.person_discovery_2016
class _ShotThreading(sciluigi.ExternalTask):
workdir = luigi.Parameter()
in_video = None
def out_put(self):
TEMPLATE = '{workdir}/_threads/{corpus}/{show}.json'
video = self.in_video().task
corpus = video.corpus
show = video.show
path = TEMPLATE.format(
workdir=self.workdir, corpus=corpus, show=show)
return sciluigi.TargetInfo(self, path)
class _DLIBModel(sciluigi.Task):
workdir = luigi.Parameter()
def out_put(self):
TEMPLATE = '{workdir}/_models/dlib.face.landmarks.dat'
path = TEMPLATE.format(workdir=self.workdir)
return sciluigi.TargetInfo(self, path)
def run(self):
URL = "https://raw.githubusercontent.com/pyannote/pyannote-data/master/dlib.face.landmarks.dat"
context = ssl._create_unverified_context()
resource = six.moves.urllib.request.urlopen(URL, context=context)
with self.out_put().open('w') as fp:
fp.write(resource.read())
class _OpenfaceModel(sciluigi.Task):
workdir = luigi.Parameter()
def out_put(self):
TEMPLATE = '{workdir}/_models/openface.nn4.small2.v1.t7'
path = TEMPLATE.format(workdir=self.workdir)
return sciluigi.TargetInfo(self, path)
def run(self):
URL = "https://raw.githubusercontent.com/pyannote/pyannote-data/master/openface.nn4.small2.v1.t7"
context = ssl._create_unverified_context()
resource = six.moves.urllib.request.urlopen(URL, context=context)
with self.out_put().open('w') as fp:
fp.write(resource.read())
class FaceTracking(sciluigi.Task):
workdir = luigi.Parameter()
in_video = None
in_shot = None
def out_put(self):
TEMPLATE = '{workdir}/face_tracking/{corpus}/{show}.txt'
video = self.in_video().task
corpus = video.corpus
show = video.show
path = TEMPLATE.format(
workdir=self.workdir, corpus=corpus, show=show)
return sciluigi.TargetInfo(self, path)
def run(self):
FACE_TEMPLATE = ('{t:.3f} {identifier:d} '
'{left:.3f} {top:.3f} {right:.3f} {bottom:.3f}\n')
video = pyannote.video.Video(self.in_video().path)
with self.in_shot().open('r') as fp:
shot = pyannote.core.json.load(fp)
shot = shot.get_timeline()
tracking = pyannote.video.face.tracking.FaceTracking(
detect_min_size=0.1,
detect_every=1.0,
track_max_gap=1.0)
with self.out_put().open('w') as fp:
for identifier, track in enumerate(tracking(video, shot)):
for t, (left, top, right, bottom), _ in track:
line = FACE_TEMPLATE.format(
t=t, identifier=identifier,
left=left, right=right, top=top, bottom=bottom)
fp.write(line)
class _FaceLandmarks(sciluigi.Task):
workdir = luigi.Parameter()
in_video = None
in_tracking = None
in_model = None
def out_put(self):
TEMPLATE = '{workdir}/_face_landmarks/{corpus}/{show}.txt'
video = self.in_video().task
corpus = video.corpus
show = video.show
path = TEMPLATE.format(
workdir=self.workdir, corpus=corpus, show=show)
return sciluigi.TargetInfo(self, path)
def run(self):
video = pyannote.video.Video(self.in_video().path)
frame_width, frame_height = video.frame_size
tracking = self.in_tracking().path
face_generator = pyannote_workflows.tasks.person_discovery_2016._getFaceGenerator(
tracking, frame_width, frame_height, double=False)
face_generator.send(None)
model = self.in_model().path
face = pyannote.video.face.face.Face(landmarks=model)
with self.out_put().open('w') as fp:
for timestamp, rgb in video:
# get all detected faces at this time
T, faces = face_generator.send(timestamp)
                # note that T might differ slightly from t
# due to different steps in frame iteration
for identifier, boundingBox, _ in faces:
landmarks = face._get_landmarks(rgb, boundingBox)
fp.write('{t:.3f} {identifier:d}'.format(
t=T, identifier=int(identifier)))
for x, y in landmarks:
fp.write(' {x:.5f} {y:.5f}'.format(
x=x / frame_width,
y=y / frame_height))
fp.write('\n')
class _Openface(sciluigi.Task):
workdir = luigi.Parameter()
in_video = None
in_landmarks = None
in_model = None
def out_put(self):
TEMPLATE = '{workdir}/_openface/{corpus}/{show}.txt'
video = self.in_video().task
corpus = video.corpus
show = video.show
path = TEMPLATE.format(
workdir=self.workdir, corpus=corpus, show=show)
return sciluigi.TargetInfo(self, path)
def run(self):
video = pyannote.video.Video(self.in_video().path)
frame_width, frame_height = video.frame_size
landmarks = self.in_landmarks().path
landmark_generator = pyannote_workflows.tasks.person_discovery_2016._getLandmarkGenerator(
landmarks, frame_width, frame_height)
landmark_generator.send(None)
model = self.in_model().path
face = pyannote.video.face.face.Face(size=96, openface=model)
with self.out_put().open('w') as fp:
for timestamp, rgb in video:
T, shapes = landmark_generator.send(timestamp)
for identifier, landmarks in shapes:
normalized_rgb = face._get_normalized(rgb, landmarks)
normalized_bgr = cv2.cvtColor(normalized_rgb,
cv2.COLOR_BGR2RGB)
openface = face._get_openface(normalized_bgr)
fp.write('{t:.3f} {identifier:d}'.format(
t=T, identifier=identifier))
for x in openface:
fp.write(' {x:.5f}'.format(x=x))
fp.write('\n')
class FaceClustering(sciluigi.Task):
workdir = luigi.Parameter()
in_video = None
in_openface = None
def out_put(self):
TEMPLATE = '{workdir}/face_clustering/{corpus}/{show}.txt'
video = self.in_video().task
corpus = video.corpus
show = video.show
path = TEMPLATE.format(
workdir=self.workdir, corpus=corpus, show=show)
return sciluigi.TargetInfo(self, path)
def run(self):
TEMPLATE = '{identifier:d} {cluster:g}\n'
clustering = pyannote.video.face.clustering.FaceClustering(
threshold=0.4)
openface = self.in_openface().path
starting_point, features = clustering.model.preprocess(openface)
if starting_point:
result = clustering(starting_point, features=features)
else:
result = starting_point
with self.out_put().open('w') as fp:
for _, identifier, cluster in result.itertracks(label=True):
line = TEMPLATE.format(identifier=identifier, cluster=cluster)
fp.write(line)
class FaceWorkflow(sciluigi.WorkflowTask):
workdir = luigi.Parameter(
default='/vol/work1/bredin/mediaeval/PersonDiscovery2016/baseline')
corpus_dir = luigi.Parameter(
default='/vol/corpora5/mediaeval')
corpus = luigi.Parameter(
default='INA')
show = luigi.Parameter(
default='F2_TS/20130607/130607FR20000_B.MPG')
def workflow(self):
video = self.new_task(
'video',
pyannote_workflows.tasks.person_discovery_2016.Video,
corpus_dir=self.corpus_dir,
corpus=self.corpus,
show=self.show)
_shotThreading = self.new_task(
'_shotThreading',
_ShotThreading,
workdir=self.workdir)
_shotThreading.in_video = video.out_put
faceTracking = self.new_task(
'faceTracking',
FaceTracking,
workdir=self.workdir)
faceTracking.in_video = video.out_put
faceTracking.in_shot = _shotThreading.out_put
_faceLandmarks = self.new_task(
'_faceLandmarks',
_FaceLandmarks,
workdir=self.workdir)
_dlibModel = self.new_task(
'_dlibModel',
_DLIBModel,
workdir=self.workdir)
_faceLandmarks.in_video = video.out_put
_faceLandmarks.in_tracking = faceTracking.out_put
_faceLandmarks.in_model = _dlibModel.out_put
_openfaceModel = self.new_task(
'_openfaceModel',
_OpenfaceModel,
workdir=self.workdir)
_openface = self.new_task(
'_openface',
_Openface,
workdir=self.workdir)
_openface.in_video = video.out_put
_openface.in_landmarks = _faceLandmarks.out_put
_openface.in_model = _openfaceModel.out_put
faceClustering = self.new_task(
'faceClustering',
FaceClustering,
workdir=self.workdir)
faceClustering.in_video = video.out_put
faceClustering.in_openface = _openface.out_put
return faceClustering
if __name__ == '__main__':
sciluigi.run_local(main_task_cls=FaceWorkflow)
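    # Editor's note: sciluigi.run_local hands control to luigi's command line,
    # so the workflow parameters above can presumably be overridden at
    # invocation time, e.g. (assuming luigi's usual underscore-to-dash
    # parameter mapping):
    #     python face.py --corpus INA --show F2_TS/20130607/130607FR20000_B.MPG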
| pyannote/pyannote-workflows | pyannote_workflows/workflows/persondiscovery2016/face.py | Python | mit | 9,955 |
from __future__ import absolute_import, unicode_literals
import json
from flask import current_app, request, Response
from flask_restful import Resource
from urllib import quote
from freight.config import db
from freight.exceptions import ApiError
from freight.utils.auth import get_current_user
LINK_HEADER = '<{uri}&cursor={cursor}>; rel="{name}"'
class ApiView(Resource):
def is_authorized(self):
current_user = get_current_user()
if current_user:
return True
try:
auth = request.headers['Authorization']
except KeyError:
return False
try:
method, payload = auth.split(' ', 1)
except ValueError:
return False
if method != 'Key':
return False
if payload != current_app.config['API_KEY']:
return False
return True
def dispatch_request(self, *args, **kwargs):
if not self.is_authorized():
return self.error(
message='You are not authorized.',
name='unauthorized',
)
try:
response = super(ApiView, self).dispatch_request(*args, **kwargs)
except ApiError as e:
return self.error(
message=e.message,
name=e.name,
status_code=e.status_code,
)
except Exception:
db.session.rollback()
raise
else:
db.session.commit()
return response
def error(self, message, name=None, status_code=400):
context = {
'error': message,
}
if name:
context['error_name'] = name
return self.respond(context, status_code=status_code)
def respond(self, context, status_code=200, links=None):
response = Response(
json.dumps(context),
mimetype='application/json',
status=status_code,
)
if links:
response.headers['Link'] = ', '.join(links)
return response
def build_cursor_link(self, name, cursor):
querystring = u'&'.join(
u'{0}={1}'.format(quote(k), quote(v))
for k, v in request.args.iteritems()
if k != 'cursor'
)
base_url = request.base_url
if querystring:
base_url = '{0}?{1}'.format(base_url, querystring)
else:
base_url = base_url + '?'
return LINK_HEADER.format(
uri=base_url,
cursor=str(cursor),
name=name,
)
def make_links(self, current_page, has_next_page=None):
links = []
if current_page > 1:
links.append((self.build_cursor_link('previous', current_page - 1)))
if has_next_page:
links.append((self.build_cursor_link('next', current_page + 1)))
return links
def paginate(self, seq, max_limit=100, on_results=None, **kwargs):
cursor = int(request.args.get('cursor', 1))
limit = int(request.args.get('limit', 25) or 0)
if max_limit:
assert limit <= max_limit
if cursor:
offset = (cursor - 1) * limit
result = list(seq[offset:offset + limit + 1])
else:
offset = 0
page = 1
result = list(seq)
links = self.make_links(
current_page=cursor,
has_next_page=limit and len(result) > limit,
)
if limit:
result = result[:limit]
if on_results:
result = on_results(result)
return self.respond(result, links=links, **kwargs)
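    # Editor's note: a sketch of how a resource might build on this base
    # class; the view name, model and serializer below are illustrative, not
    # actual freight code.
    #
    #     class TaskIndexApiView(ApiView):
    #         def get(self):
    #             queryset = Task.query.order_by(Task.date_created.desc())
    #             return self.paginate(queryset, on_results=serialize_tasks)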
| jkimbo/freight | freight/api/base.py | Python | apache-2.0 | 3,675 |
# Example script that shows how the co-routines will work.
# ``found_requests``, ``crawler`` and ``xss_plugin`` are placeholders supplied
# by the surrounding framework; this file only sketches the audit protocol.
import copy
def audit(self, req):
    # Meant to live on an XSS plugin class: yield each mutated request to the
    # crawler, then receive the corresponding response via send().
    for i in ["adam", "test", "xss", "blah"]:
        new_req = copy.deepcopy(req)   # avoid mutating the crawler's request
        new_req.param[0] = i
        response = yield new_req       # crawler sends the response back here
        if i in response:
            print("VULN FOUND")
def example_crawler():
    for req in found_requests:
        audit_gen = xss_plugin.audit(req)
        try:
            new_req = next(audit_gen)            # first mutated request
            while True:
                response = new_req.send_request()
                if crawler.changed_state():
                    crawler.put_back_to_previous_state()
                # Hand the response to the audit; it replies with the next
                # mutated request, or finishes and raises StopIteration.
                new_req = audit_gen.send(response)
        except StopIteration:
            pass
| adamdoupe/enemy-of-the-state | audit/example.py | Python | gpl-2.0 | 713 |
# ext/compiler.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
r"""Provides an API for creation of custom ClauseElements and compilers.
Synopsis
========
Usage involves the creation of one or more
:class:`~sqlalchemy.sql.expression.ClauseElement` subclasses and one or
more callables defining its compilation::
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.expression import ColumnClause
class MyColumn(ColumnClause):
inherit_cache = True
@compiles(MyColumn)
def compile_mycolumn(element, compiler, **kw):
return "[%s]" % element.name
Above, ``MyColumn`` extends :class:`~sqlalchemy.sql.expression.ColumnClause`,
the base expression element for named column objects. The ``compiles``
decorator registers itself with the ``MyColumn`` class so that it is invoked
when the object is compiled to a string::
from sqlalchemy import select
s = select(MyColumn('x'), MyColumn('y'))
print(str(s))
Produces::
SELECT [x], [y]
Dialect-specific compilation rules
==================================
Compilers can also be made dialect-specific. The appropriate compiler will be
invoked for the dialect in use::
from sqlalchemy.schema import DDLElement
class AlterColumn(DDLElement):
inherit_cache = False
def __init__(self, column, cmd):
self.column = column
self.cmd = cmd
@compiles(AlterColumn)
def visit_alter_column(element, compiler, **kw):
return "ALTER COLUMN %s ..." % element.column.name
@compiles(AlterColumn, 'postgresql')
def visit_alter_column(element, compiler, **kw):
return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name,
element.column.name)
The second ``visit_alter_column`` will be invoked when any ``postgresql``
dialect is used.
.. _compilerext_compiling_subelements:
Compiling sub-elements of a custom expression construct
=======================================================
The ``compiler`` argument is the
:class:`~sqlalchemy.engine.interfaces.Compiled` object in use. This object
can be inspected for any information about the in-progress compilation,
including ``compiler.dialect``, ``compiler.statement`` etc. The
:class:`~sqlalchemy.sql.compiler.SQLCompiler` and
:class:`~sqlalchemy.sql.compiler.DDLCompiler` both include a ``process()``
method which can be used for compilation of embedded attributes::
from sqlalchemy.sql.expression import Executable, ClauseElement
class InsertFromSelect(Executable, ClauseElement):
inherit_cache = False
def __init__(self, table, select):
self.table = table
self.select = select
@compiles(InsertFromSelect)
def visit_insert_from_select(element, compiler, **kw):
return "INSERT INTO %s (%s)" % (
compiler.process(element.table, asfrom=True, **kw),
compiler.process(element.select, **kw)
)
insert = InsertFromSelect(t1, select(t1).where(t1.c.x>5))
print(insert)
Produces::
"INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z
FROM mytable WHERE mytable.x > :x_1)"
.. note::
The above ``InsertFromSelect`` construct is only an example, this actual
functionality is already available using the
:meth:`_expression.Insert.from_select` method.
Cross Compiling between SQL and DDL compilers
---------------------------------------------
SQL and DDL constructs are each compiled using different base compilers -
``SQLCompiler`` and ``DDLCompiler``. A common need is to access the
compilation rules of SQL expressions from within a DDL expression. The
``DDLCompiler`` includes an accessor ``sql_compiler`` for this reason, such as
below where we generate a CHECK constraint that embeds a SQL expression::
@compiles(MyConstraint)
def compile_my_constraint(constraint, ddlcompiler, **kw):
kw['literal_binds'] = True
return "CONSTRAINT %s CHECK (%s)" % (
constraint.name,
ddlcompiler.sql_compiler.process(
constraint.expression, **kw)
)
Above, we add an additional flag to the process step as called by
:meth:`.SQLCompiler.process`, which is the ``literal_binds`` flag. This
indicates that any SQL expression which refers to a :class:`.BindParameter`
object or other "literal" object such as those which refer to strings or
integers should be rendered **in-place**, rather than being referred to as
a bound parameter; when emitting DDL, bound parameters are typically not
supported.
Changing the default compilation of existing constructs
=======================================================
The compiler extension applies just as well to the existing constructs. When
overriding the compilation of a built in SQL construct, the @compiles
decorator is invoked upon the appropriate class (be sure to use the class,
i.e. ``Insert`` or ``Select``, instead of the creation function such
as ``insert()`` or ``select()``).
Within the new compilation function, to get at the "original" compilation
routine, use the appropriate visit_XXX method - this is because
compiler.process() will call upon the overriding routine and cause
an endless loop. For example, to add a prefix to all insert statements::
from sqlalchemy.sql.expression import Insert
@compiles(Insert)
def prefix_inserts(insert, compiler, **kw):
return compiler.visit_insert(insert.prefix_with("some prefix"), **kw)
The above compiler will prefix all INSERT statements with "some prefix" when
compiled.
.. _type_compilation_extension:
Changing Compilation of Types
=============================
``compiler`` works for types, too, such as below where we implement the
MS-SQL specific 'max' keyword for ``String``/``VARCHAR``::
@compiles(String, 'mssql')
@compiles(VARCHAR, 'mssql')
def compile_varchar(element, compiler, **kw):
if element.length == 'max':
return "VARCHAR('max')"
else:
return compiler.visit_VARCHAR(element, **kw)
foo = Table('foo', metadata,
Column('data', VARCHAR('max'))
)
Subclassing Guidelines
======================
A big part of using the compiler extension is subclassing SQLAlchemy
expression constructs. To make this easier, the expression and
schema packages feature a set of "bases" intended for common tasks.
A synopsis is as follows:
* :class:`~sqlalchemy.sql.expression.ClauseElement` - This is the root
expression class. Any SQL expression can be derived from this base, and is
probably the best choice for longer constructs such as specialized INSERT
statements.
* :class:`~sqlalchemy.sql.expression.ColumnElement` - The root of all
"column-like" elements. Anything that you'd place in the "columns" clause of
a SELECT statement (as well as order by and group by) can derive from this -
the object will automatically have Python "comparison" behavior.
:class:`~sqlalchemy.sql.expression.ColumnElement` classes want to have a
``type`` member which is expression's return type. This can be established
at the instance level in the constructor, or at the class level if its
generally constant::
class timestamp(ColumnElement):
type = TIMESTAMP()
inherit_cache = True
* :class:`~sqlalchemy.sql.functions.FunctionElement` - This is a hybrid of a
``ColumnElement`` and a "from clause" like object, and represents a SQL
function or stored procedure type of call. Since most databases support
statements along the lines of "SELECT FROM <some function>"
``FunctionElement`` adds in the ability to be used in the FROM clause of a
``select()`` construct::
from sqlalchemy.sql.expression import FunctionElement
class coalesce(FunctionElement):
name = 'coalesce'
inherit_cache = True
@compiles(coalesce)
def compile(element, compiler, **kw):
return "coalesce(%s)" % compiler.process(element.clauses, **kw)
@compiles(coalesce, 'oracle')
def compile(element, compiler, **kw):
if len(element.clauses) > 2:
raise TypeError("coalesce only supports two arguments on Oracle")
return "nvl(%s)" % compiler.process(element.clauses, **kw)
* :class:`~sqlalchemy.schema.DDLElement` - The root of all DDL expressions,
like CREATE TABLE, ALTER TABLE, etc. Compilation of ``DDLElement``
subclasses is issued by a ``DDLCompiler`` instead of a ``SQLCompiler``.
``DDLElement`` also features ``Table`` and ``MetaData`` event hooks via the
``execute_at()`` method, allowing the construct to be invoked during CREATE
TABLE and DROP TABLE sequences.
* :class:`~sqlalchemy.sql.expression.Executable` - This is a mixin which
should be used with any expression class that represents a "standalone"
SQL statement that can be passed directly to an ``execute()`` method. It
is already implicit within ``DDLElement`` and ``FunctionElement``.
Most of the above constructs also respond to SQL statement caching. A
subclassed construct will want to define the caching behavior for the object,
which usually means setting the flag ``inherit_cache`` to the value of
``False`` or ``True``. See the next section :ref:`compilerext_caching`
for background.
.. _compilerext_caching:
Enabling Caching Support for Custom Constructs
==============================================
SQLAlchemy as of version 1.4 includes a
:ref:`SQL compilation caching facility <sql_caching>` which will allow
equivalent SQL constructs to cache their stringified form, along with other
structural information used to fetch results from the statement.
For reasons discussed at :ref:`caching_caveats`, the implementation of this
caching system takes a conservative approach towards including custom SQL
constructs and/or subclasses within the caching system. This includes that
any user-defined SQL constructs, including all the examples for this
extension, will not participate in caching by default unless they positively
assert that they are able to do so. The :attr:`.HasCacheKey.inherit_cache`
attribute when set to ``True`` at the class level of a specific subclass
will indicate that instances of this class may be safely cached, using the
cache key generation scheme of the immediate superclass. This applies
for example to the "synopsis" example indicated previously::
class MyColumn(ColumnClause):
inherit_cache = True
@compiles(MyColumn)
def compile_mycolumn(element, compiler, **kw):
return "[%s]" % element.name
Above, the ``MyColumn`` class does not include any new state that
affects its SQL compilation; the cache key of ``MyColumn`` instances will
make use of that of the ``ColumnClause`` superclass, meaning it will take
into account the class of the object (``MyColumn``), the string name and
datatype of the object::
>>> MyColumn("some_name", String())._generate_cache_key()
CacheKey(
key=('0', <class '__main__.MyColumn'>,
'name', 'some_name',
'type', (<class 'sqlalchemy.sql.sqltypes.String'>,
('length', None), ('collation', None))
), bindparams=[])
For objects that are likely to be **used liberally as components within many
larger statements**, such as :class:`_schema.Column` subclasses and custom SQL
datatypes, it's important that **caching be enabled as much as possible**, as
this may otherwise negatively affect performance.
An example of an object that **does** contain state which affects its SQL
compilation is the one illustrated at :ref:`compilerext_compiling_subelements`;
this is an "INSERT FROM SELECT" construct that combines together a
:class:`_schema.Table` as well as a :class:`_sql.Select` construct, each of
which independently affect the SQL string generation of the construct. For
this class, the example illustrates that it simply does not participate in
caching::
class InsertFromSelect(Executable, ClauseElement):
inherit_cache = False
def __init__(self, table, select):
self.table = table
self.select = select
@compiles(InsertFromSelect)
def visit_insert_from_select(element, compiler, **kw):
return "INSERT INTO %s (%s)" % (
compiler.process(element.table, asfrom=True, **kw),
compiler.process(element.select, **kw)
)
While it is also possible that the above ``InsertFromSelect`` could be made to
produce a cache key that is composed of that of the :class:`_schema.Table` and
:class:`_sql.Select` components together, the API for this is not at the moment
fully public. However, for an "INSERT FROM SELECT" construct, which is only
used by itself for specific operations, caching is not as critical as in the
previous example.
For objects that are **used in relative isolation and are generally
standalone**, such as custom :term:`DML` constructs like an "INSERT FROM
SELECT", **caching is generally less critical** as the lack of caching for such
a construct will have only localized implications for that specific operation.
Further Examples
================
"UTC timestamp" function
-------------------------
A function that works like "CURRENT_TIMESTAMP" except applies the
appropriate conversions so that the time is in UTC time. Timestamps are best
stored in relational databases as UTC, without time zones. UTC so that your
database doesn't think time has gone backwards in the hour when daylight
savings ends, without timezones because timezones are like character
encodings - they're best applied only at the endpoints of an application
(i.e. convert to UTC upon user input, re-apply desired timezone upon display).
For PostgreSQL and Microsoft SQL Server::
from sqlalchemy.sql import expression
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.types import DateTime
class utcnow(expression.FunctionElement):
type = DateTime()
inherit_cache = True
@compiles(utcnow, 'postgresql')
def pg_utcnow(element, compiler, **kw):
return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
@compiles(utcnow, 'mssql')
def ms_utcnow(element, compiler, **kw):
return "GETUTCDATE()"
Example usage::
from sqlalchemy import (
Table, Column, Integer, String, DateTime, MetaData
)
metadata = MetaData()
event = Table("event", metadata,
Column("id", Integer, primary_key=True),
Column("description", String(50), nullable=False),
Column("timestamp", DateTime, server_default=utcnow())
)
"GREATEST" function
-------------------
The "GREATEST" function is given any number of arguments and returns the one
that is of the highest value - it's equivalent to Python's ``max``
function. Below is a SQL standard version alongside a CASE-based version
which only accommodates two arguments::
from sqlalchemy.sql import expression, case
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.types import Numeric
class greatest(expression.FunctionElement):
type = Numeric()
name = 'greatest'
inherit_cache = True
@compiles(greatest)
def default_greatest(element, compiler, **kw):
return compiler.visit_function(element)
@compiles(greatest, 'sqlite')
@compiles(greatest, 'mssql')
@compiles(greatest, 'oracle')
def case_greatest(element, compiler, **kw):
arg1, arg2 = list(element.clauses)
return compiler.process(case([(arg1 > arg2, arg1)], else_=arg2), **kw)
Example usage::
Session.query(Account).\
filter(
greatest(
Account.checking_balance,
Account.savings_balance) > 10000
)
"false" expression
------------------
Render a "false" constant expression, rendering as "0" on platforms that
don't have a "false" constant::
from sqlalchemy.sql import expression
from sqlalchemy.ext.compiler import compiles
class sql_false(expression.ColumnElement):
inherit_cache = True
@compiles(sql_false)
def default_false(element, compiler, **kw):
return "false"
@compiles(sql_false, 'mssql')
@compiles(sql_false, 'mysql')
@compiles(sql_false, 'oracle')
def int_false(element, compiler, **kw):
return "0"
Example usage::
from sqlalchemy import select, union_all
exp = union_all(
select(users.c.name, sql_false().label("enrolled")),
select(customers.c.name, customers.c.enrolled)
)
"""
from .. import exc
from ..sql import sqltypes
def compiles(class_, *specs):
"""Register a function as a compiler for a
given :class:`_expression.ClauseElement` type."""
def decorate(fn):
# get an existing @compiles handler
existing = class_.__dict__.get("_compiler_dispatcher", None)
# get the original handler. All ClauseElement classes have one
# of these, but some TypeEngine classes will not.
existing_dispatch = getattr(class_, "_compiler_dispatch", None)
if not existing:
existing = _dispatcher()
if existing_dispatch:
def _wrap_existing_dispatch(element, compiler, **kw):
try:
return existing_dispatch(element, compiler, **kw)
except exc.UnsupportedCompilationError as uce:
raise exc.UnsupportedCompilationError(
compiler,
type(element),
message="%s construct has no default "
"compilation handler." % type(element),
) from uce
existing.specs["default"] = _wrap_existing_dispatch
# TODO: why is the lambda needed ?
setattr(
class_,
"_compiler_dispatch",
lambda *arg, **kw: existing(*arg, **kw),
)
setattr(class_, "_compiler_dispatcher", existing)
if specs:
for s in specs:
existing.specs[s] = fn
else:
existing.specs["default"] = fn
return fn
return decorate
def deregister(class_):
"""Remove all custom compilers associated with a given
:class:`_expression.ClauseElement` type.
"""
if hasattr(class_, "_compiler_dispatcher"):
class_._compiler_dispatch = class_._original_compiler_dispatch
del class_._compiler_dispatcher
class _dispatcher:
def __init__(self):
self.specs = {}
def __call__(self, element, compiler, **kw):
# TODO: yes, this could also switch off of DBAPI in use.
fn = self.specs.get(compiler.dialect.name, None)
if not fn:
try:
fn = self.specs["default"]
except KeyError as ke:
raise exc.UnsupportedCompilationError(
compiler,
type(element),
message="%s construct has no default "
"compilation handler." % type(element),
) from ke
# if compilation includes add_to_result_map, collect add_to_result_map
# arguments from the user-defined callable, which are probably none
# because this is not public API. if it wasn't called, then call it
# ourselves.
arm = kw.get("add_to_result_map", None)
if arm:
arm_collection = []
kw["add_to_result_map"] = lambda *args: arm_collection.append(args)
expr = fn(element, compiler, **kw)
if arm:
if not arm_collection:
arm_collection.append(
(None, None, (element,), sqltypes.NULLTYPE)
)
for tup in arm_collection:
arm(*tup)
return expr
| sqlalchemy/sqlalchemy | lib/sqlalchemy/ext/compiler.py | Python | mit | 20,045 |
from sys import stdout
import pprint
import psycopg2
# Make a new Grouvie table to store all the plans
CREATE_GROUVIE = """
CREATE TABLE GROUVIE(
PHONE_NUMBER CHAR(11) NOT NULL,
LEADER CHAR(11) NOT NULL,
CREATION_DATETIME CHAR(19) NOT NULL,
DATE CHAR(10),
SHOWTIME CHAR(5),
FILM TEXT,
CINEMA TEXT,
ACCEPTED BOOLEAN,
PRIMARY KEY (PHONE_NUMBER, LEADER, CREATION_DATETIME)
)
"""
# Make a new User table to store all user data
CREATE_USERS = """
CREATE TABLE USERS(
PHONE_NUMBER CHAR(11) NOT NULL,
NAME TEXT NOT NULL,
POSTCODE TEXT NOT NULL,
LATITUDE NUMERIC(8, 6) NOT NULL,
LONGITUDE NUMERIC(9, 6) NOT NULL,
PRIMARY KEY (PHONE_NUMBER)
)
"""
# Delete a table
DROP_GROUVIE_TABLE = """
DROP TABLE GROUVIE
"""
# Delete a user table
DROP_USERS_TABLE = """
DROP TABLE USERS
"""
# Insert a new entry into the Grouvie table
INSERT_GROUVIE = """
INSERT INTO GROUVIE
VALUES
(%s, %s, %s, %s, %s, %s, %s, %s)
"""
# Insert a new entry into the Users table
INSERT_USERS = """
INSERT INTO USERS
VALUES
(%s, %s, %s, %s, %s)
"""
ACCEPT_PLAN = """
UPDATE GROUVIE
SET ACCEPTED = true
WHERE
PHONE_NUMBER = %s AND LEADER = %s AND CREATION_DATETIME = %s
"""
CONFIRM_PLAN = """
UPDATE GROUVIE
SET ACCEPTED = true
WHERE
LEADER = %s AND CREATION_DATETIME = %s AND PHONE_NUMBER = LEADER
"""
IS_PLAN_CONFIRMED = """
SELECT ACCEPTED FROM GROUVIE
WHERE
LEADER = %s AND CREATION_DATETIME = %s AND PHONE_NUMBER = LEADER
"""
# Update an already existing entry in the Grouvie table
UPDATE_GROUVIE = """
UPDATE GROUVIE
SET DATE = %s, SHOWTIME = %s, FILM = %s, CINEMA = %s
WHERE
PHONE_NUMBER = %s AND LEADER = %s AND CREATION_DATETIME = %s
"""
# Update an already existing entry in the USER table
UPDATE_USERS = """
UPDATE USERS
SET NAME = %s, POSTCODE = %s, LATITUDE = %s, LONGITUDE = %s
WHERE
PHONE_NUMBER = %s
"""
RESET_USER_PREFS = """
UPDATE GROUVIE
SET DATE = NULL, SHOWTIME = NULL, FILM = NULL, CINEMA = NULL, ACCEPTED = FALSE
WHERE LEADER = %s AND CREATION_DATETIME = %s AND (PHONE_NUMBER != LEADER)
"""
# Delete an entry from the Grouvie table given a phone_number, leader and creation datetime
DELETE_SINGLE = """
DELETE FROM GROUVIE
WHERE PHONE_NUMBER = %s and LEADER = %s and CREATION_DATETIME = %s
"""
# Delete entries from the Grouvie table given a leader and creation datetime
DELETE_PLAN = """
DELETE FROM GROUVIE
WHERE LEADER = %s and CREATION_DATETIME = %s
"""
# Get group replies
GROUP_REPLIES = """
SELECT * FROM GROUVIE
WHERE
LEADER = %s AND CREATION_DATETIME = %s
"""
# Display everything in the Grouvie table
SELECT_ALL_GROUVIE = """
SELECT * FROM GROUVIE
"""
# Display everything in the Users table
SELECT_ALL_USERS = """
SELECT * FROM USERS
"""
# Select a single entry from the Grouvie table based on phone_number
SELECT_GROUVIE = """
SELECT * FROM GROUVIE
WHERE
PHONE_NUMBER = %s
"""
# Select a single entry from the Users table based on phone_number
SELECT_USERS = """
SELECT * FROM USERS
WHERE
PHONE_NUMBER = %s
"""
SELECT_VALID_USERS = """
SELECT PHONE_NUMBER, NAME FROM USERS
WHERE
PHONE_NUMBER IN {}
"""
GROUVIE = "GROUVIE"
USER = "USER"
class DBManager:
# Establish a new connection with the PostgreSQL database.
# We return the cursor so we can execute on the database, we return the
# connection so we can close it when we're done.
def establish_connection(self):
conn_str = "dbname='g1627137_u' user='g1627137_u'" \
"host='db.doc.ic.ac.uk' password='Vk426n3Kjx'"
try:
cnxn = psycopg2.connect(conn_str)
cnxn.autocommit = True
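            # autocommit: every statement is committed as soon as it runs;
            # nothing else in this class ever calls commit() explicitly.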
cursor = cnxn.cursor()
return cnxn, cursor
except Exception as e:
message = e.message + "\nFailed to establish connection. " \
"Check connection string."
exit(message)
# Close a connection to the database, kills the cursor and the connection.
def close_connection(self, cnxn, cursor):
try:
cursor.close()
cnxn.close()
except Exception as e:
message = e.message + "\nFailed to close connection."
exit(message)
# Make a new Grouvie table.
def make_grouvie_table(self):
cnxn, cursor = self.establish_connection()
cursor.execute(CREATE_GROUVIE)
self.close_connection(cnxn, cursor)
# Make a new Users table.
def make_user_table(self):
cnxn, cursor = self.establish_connection()
cursor.execute(CREATE_USERS)
self.close_connection(cnxn, cursor)
# Delete a pre-existing table.
def drop_grouvie_table(self):
cnxn, cursor = self.establish_connection()
cursor.execute(DROP_GROUVIE_TABLE)
self.close_connection(cnxn, cursor)
# Delete a pre-existing table.
def drop_user_table(self):
cnxn, cursor = self.establish_connection()
cursor.execute(DROP_USERS_TABLE)
self.close_connection(cnxn, cursor)
# Insert a new entry into the Grouvie table.
def insert_grouvie(self, phone_number, leader, creation_datetime,
date, showtime, film, cinema, accepted):
cnxn, cursor = self.establish_connection()
cursor.execute(INSERT_GROUVIE, (phone_number, leader, creation_datetime,
date, showtime, film, cinema, accepted))
self.close_connection(cnxn, cursor)
def insert_user(self, phone_number, name, postcode, latitude, longitude):
cnxn, cursor = self.establish_connection()
cursor.execute(INSERT_USERS, (phone_number, name, postcode, latitude,
longitude))
self.close_connection(cnxn, cursor)
def accept_plan(self, phone_number, leader, creation_datetime):
cnxn, cursor = self.establish_connection()
cursor.execute(ACCEPT_PLAN, (phone_number, leader, creation_datetime))
self.close_connection(cnxn, cursor)
# Update an entry in the Grouvie table if it exists.
def update_grouvie(self, phone_number, leader, creation_datetime, date,
showtime, film, cinema):
cnxn, cursor = self.establish_connection()
cursor.execute(UPDATE_GROUVIE, (date, showtime, film, cinema,
phone_number, leader,
creation_datetime))
self.close_connection(cnxn, cursor)
# Update an entry in the USERS table if it exists.
def update_users(self, phone_number, name, postcode, latitude, longitude):
cnxn, cursor = self.establish_connection()
cursor.execute(UPDATE_USERS, (name, postcode, latitude, longitude,
phone_number))
self.close_connection(cnxn, cursor)
def confirm_plan(self, leader, creation_datetime):
cnxn, cursor = self.establish_connection()
cursor.execute(CONFIRM_PLAN, (leader, creation_datetime))
self.close_connection(cnxn, cursor)
def is_plan_confirmed(self, leader, creation_datetime):
cnxn, cursor = self.establish_connection()
cursor.execute(IS_PLAN_CONFIRMED, (leader, creation_datetime))
results = cursor.fetchall()
self.close_connection(cnxn, cursor)
return results[0][0]
# Get group replies for a plan
def group_replies(self, leader, creation_datetime):
cnxn, cursor = self.establish_connection()
cursor.execute(GROUP_REPLIES, (leader, creation_datetime))
results = cursor.fetchall()
self.close_connection(cnxn, cursor)
all_changes = {}
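        # Each row follows the column order declared in CREATE_GROUVIE:
        # (phone_number, leader, creation_datetime, date, showtime, film,
        #  cinema, accepted) - hence the positional indexing below.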
for i in range(len(results)):
user = results[i]
changes_made = {"accepted": user[7]}
if user[3] is not None:
changes_made["date"] = user[3]
if user[4] is not None:
changes_made["showtime"] = user[4]
if user[5] is not None:
changes_made["film"] = user[5]
if user[6] is not None:
changes_made["cinema"] = user[6]
all_changes[user[0]] = changes_made
print all_changes
stdout.flush()
return all_changes
# Reset all user preferences
def reset_user_prefs(self, leader, creation_datetime):
cnxn, cursor = self.establish_connection()
cursor.execute(RESET_USER_PREFS, (leader, creation_datetime))
self.close_connection(cnxn, cursor)
# Delete an entry from the table correlating with a user
def delete_single_grouvie(self, phone_number, leader, creation_datetime):
cnxn, cursor = self.establish_connection()
cursor.execute(DELETE_SINGLE, (phone_number, leader, creation_datetime))
self.close_connection(cnxn, cursor)
# Delete entries from the table correlating with a plan
def delete_plan_grouvie(self, leader, creation_datetime):
cnxn, cursor = self.establish_connection()
cursor.execute(DELETE_PLAN, (leader, creation_datetime))
self.close_connection(cnxn, cursor)
    # Select an entry in the Grouvie table based on phone_number.
def select_grouvie(self, phone_number):
cnxn, cursor = self.establish_connection()
        cursor.execute(SELECT_GROUVIE, (phone_number,))
result = cursor.fetchall()
self.close_connection(cnxn, cursor)
return result
    # Select an entry in the Users table based on phone_number.
def select_users(self, phone_number):
cnxn, cursor = self.establish_connection()
        cursor.execute(SELECT_USERS, (phone_number,))
result = cursor.fetchall()
self.close_connection(cnxn, cursor)
# There should only be 1 result so we just return that tuple.
return result[0] if result else []
# Select users that actually have a Grouvie account.
def select_valid_users(self, friends):
# Build the placeholders which we require when it comes to searching.
fields = "(" + ','.join(["%s"]*len(friends)) + ")"
cnxn, cursor = self.establish_connection()
# friends_tuple = "(" + ",".join(friends) + ")"
# print friends_tuple
cursor.execute(SELECT_VALID_USERS.format(fields), tuple(friends))
print tuple(friends)
results = cursor.fetchall()
self.close_connection(cnxn, cursor)
return results
# Display everything in the Grouvie table.
def select_all_grouvie(self):
cnxn, cursor = self.establish_connection()
cursor.execute(SELECT_ALL_GROUVIE)
result = cursor.fetchall()
self.close_connection(cnxn, cursor)
return result
    # Display everything in the Users table.
def select_all_users(self):
cnxn, cursor = self.establish_connection()
cursor.execute(SELECT_ALL_USERS)
results = cursor.fetchall()
self.close_connection(cnxn, cursor)
return results
if __name__ == '__main__':
data = {'PHONE_NUMBER': "1",
'LEADER': 0,
'SHOWTIME': "s",
'FILM': "GOTG3",
'CINEMA': "MEMES",
'ACCEPTED': False}
query = {'PHONE_NUMBER': "1",
'LEADER': 0,
'SHOWTIME': "s"}
db = DBManager()
# db.drop_user_table()
# db.make_user_table()
# db.insert_user("07587247113", "Erkin", "EN12LZ", 51.636495, -0.069549)
# db.insert_user("07964006128", "Tarun", "RM65DU", 51.579983, 0.124262)
# db.insert_user("07942948248", "Jay", "SW100NJ", 51.482079, -0.182265)
# # print db.select_valid_users(("1", "2", "5", "6"))
# db.drop_grouvie_table()
# db.make_grouvie_table()
pprint.PrettyPrinter(indent=4).pprint(db.select_all_grouvie())
pprint.PrettyPrinter(indent=4).pprint(db.select_all_users())
# print db.select_all_users()
# db.select_valid_users(users) | Team-JETT/Grouvie | Back-end/DBManager.py | Python | mit | 11,947 |
import unittest
from libs.funcs import *
class TestFuncs(unittest.TestCase):
def test_buildPaths(self):
recPaths, repPaths, rouPaths, corePaths = buildPaths()
findTxt = lambda x, y: x.find(y) > -1
assert findTxt(recPaths["Task"][0], "base")
assert findTxt(recPaths["Department"][0], "StdPy")
assert findTxt(recPaths["Department"][1], "standard")
assert findTxt(repPaths["ListWindowReport"][0], "base")
assert findTxt(repPaths["ExpensesList"][0], "StdPy")
assert findTxt(repPaths["ExpensesList"][1], "standard")
assert findTxt(rouPaths["GenNLT"][0], "StdPy")
assert findTxt(rouPaths["GenNLT"][1], "standard")
assert findTxt(corePaths["Field"][0], "embedded")
self.assertFalse([k for (k, v) in rouPaths.iteritems() if findTxt(v[0], "base")]) #no routines in base
def test_recordInheritance(self):
recf, recd = getRecordInheritance("Invoice")
assert all([f1 in recf for f1 in ("SalesMan", "InvoiceDate", "CustCode", "Currency", "ShiftDate", "OriginNr", "SerNr", "attachFlag")])
assert all([d in recd for d in ("CompoundItemCosts", "Payments", "Items", "Taxes", "Installs")])
recf, recd = getRecordInheritance("AccessGroup")
assert all([f2 in recf for f2 in ("PurchaseItemsAccessType", "InitialModule", "Closed", "internalId")])
assert all([d in recd for d in ("PurchaseItems", "Customs", "Modules")])
def test_recordsInfo(self):
recf, recd = getRecordsInfo("Department", RECORD)
assert recf["Department"]["AutoCashCancel"] == "integer" #From StdPy
assert recf["Department"]["DeptName"] == "string" #From standard
assert recf["Department"]["Closed"] == "Boolean" #From Master
assert recf["Department"]["internalId"] == "internalid" #From Record
assert recd["Department"]["OfficePayModes"] == "DepartmentOfficePayModeRow" #Recordname from detail
repf, repd = getRecordsInfo("Balance", REPORT)
assert repf["Balance"]["LabelType"] == "string" #StdPy
assert repf["Balance"]["ExplodeByLabel"] == "boolean" #Standard
assert repf["Balance"]["internalId"] == "internalid" #Record
assert not repd["Balance"] #Empty dict, no detail
rouf, roud = getRecordsInfo("GenNLT", ROUTINE)
assert rouf["GenNLT"]["ExcludeInvalid"] == "boolean"
assert rouf["GenNLT"]["Table"] == "string"
assert not roud["GenNLT"]
rouf, roud = getRecordsInfo("LoginDialog", RECORD)
assert rouf["LoginDialog"]["Password"] == "string" #embedded
assert not roud["LoginDialog"]
def test_classInfo(self):
attr, meth = getClassInfo("Invoice")
assert attr["DEBITNOTE"] == 2
assert attr["ATTACH_NOTE"] == 3
assert attr["rowNr"] == 0
assert attr["ParentInvoice"] == "SuperClass"
assert isinstance(attr["DocTypes"], list)
assert isinstance(attr["Origin"], dict)
assert all([m in meth for m in ("getCardReader", "logTransactionAction", "updateCredLimit",
"generateTaxes", "roundValue", "getOriginType", "bring", "getXML", "createField")])
assert meth["fieldIsEditable"][0] == "self"
assert meth["fieldIsEditable"][1] == "fieldname"
assert meth["fieldIsEditable"][2] == {"rowfieldname":'None'}
assert meth["fieldIsEditable"][3] == {"rownr":'None'}
attr, meth = getClassInfo("User")
assert attr["buffer"] == "RecordBuffer"
assert all([m in meth for m in ("store", "save", "load", "hasField")])
def test_suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestFuncs))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
| ancho85/pylint-playero-plugin | tests/test_funcs.py | Python | gpl-2.0 | 3,850 |
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class SubscribedTrackList(ListResource):
""" """
def __init__(self, version, room_sid, participant_sid):
"""
Initialize the SubscribedTrackList
:param Version version: Version that contains the resource
:param room_sid: The SID of the room where the track is published
:param participant_sid: The SID of the participant that subscribes to the track
:returns: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackList
:rtype: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackList
"""
super(SubscribedTrackList, self).__init__(version)
# Path Solution
self._solution = {'room_sid': room_sid, 'participant_sid': participant_sid, }
self._uri = '/Rooms/{room_sid}/Participants/{participant_sid}/SubscribedTracks'.format(**self._solution)
def stream(self, limit=None, page_size=None):
"""
Streams SubscribedTrackInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, limit=None, page_size=None):
"""
Lists SubscribedTrackInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackInstance]
"""
return list(self.stream(limit=limit, page_size=page_size, ))
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
"""
Retrieve a single page of SubscribedTrackInstance records from the API.
Request is executed immediately
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of SubscribedTrackInstance
:rtype: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackPage
"""
params = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
response = self._version.page(
'GET',
self._uri,
params=params,
)
return SubscribedTrackPage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of SubscribedTrackInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of SubscribedTrackInstance
:rtype: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackPage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return SubscribedTrackPage(self._version, response, self._solution)
def get(self, sid):
"""
Constructs a SubscribedTrackContext
:param sid: The SID that identifies the resource to fetch
:returns: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackContext
:rtype: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackContext
"""
return SubscribedTrackContext(
self._version,
room_sid=self._solution['room_sid'],
participant_sid=self._solution['participant_sid'],
sid=sid,
)
def __call__(self, sid):
"""
Constructs a SubscribedTrackContext
:param sid: The SID that identifies the resource to fetch
:returns: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackContext
:rtype: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackContext
"""
return SubscribedTrackContext(
self._version,
room_sid=self._solution['room_sid'],
participant_sid=self._solution['participant_sid'],
sid=sid,
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Video.V1.SubscribedTrackList>'
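# Illustrative usage sketch (not part of the generated code; assumes a
# configured twilio.rest.Client and real room/participant SIDs):
#
#     from twilio.rest import Client
#     client = Client(account_sid, auth_token)
#     tracks = client.video.rooms(room_sid) \
#                    .participants(participant_sid) \
#                    .subscribed_tracks.list()
#     for track in tracks:
#         print(track.sid, track.kind, track.enabled)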
class SubscribedTrackPage(Page):
""" """
def __init__(self, version, response, solution):
"""
Initialize the SubscribedTrackPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param room_sid: The SID of the room where the track is published
:param participant_sid: The SID of the participant that subscribes to the track
:returns: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackPage
:rtype: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackPage
"""
super(SubscribedTrackPage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of SubscribedTrackInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackInstance
:rtype: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackInstance
"""
return SubscribedTrackInstance(
self._version,
payload,
room_sid=self._solution['room_sid'],
participant_sid=self._solution['participant_sid'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Video.V1.SubscribedTrackPage>'
class SubscribedTrackContext(InstanceContext):
""" """
def __init__(self, version, room_sid, participant_sid, sid):
"""
Initialize the SubscribedTrackContext
:param Version version: Version that contains the resource
:param room_sid: The SID of the Room where the Track resource to fetch is subscribed
:param participant_sid: The SID of the participant that subscribes to the Track resource to fetch
:param sid: The SID that identifies the resource to fetch
:returns: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackContext
:rtype: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackContext
"""
super(SubscribedTrackContext, self).__init__(version)
# Path Solution
self._solution = {'room_sid': room_sid, 'participant_sid': participant_sid, 'sid': sid, }
self._uri = '/Rooms/{room_sid}/Participants/{participant_sid}/SubscribedTracks/{sid}'.format(**self._solution)
def fetch(self):
"""
Fetch a SubscribedTrackInstance
:returns: Fetched SubscribedTrackInstance
:rtype: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackInstance
"""
params = values.of({})
payload = self._version.fetch(
'GET',
self._uri,
params=params,
)
return SubscribedTrackInstance(
self._version,
payload,
room_sid=self._solution['room_sid'],
participant_sid=self._solution['participant_sid'],
sid=self._solution['sid'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Video.V1.SubscribedTrackContext {}>'.format(context)
class SubscribedTrackInstance(InstanceResource):
""" """
class Kind(object):
AUDIO = "audio"
VIDEO = "video"
DATA = "data"
def __init__(self, version, payload, room_sid, participant_sid, sid=None):
"""
Initialize the SubscribedTrackInstance
:returns: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackInstance
:rtype: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackInstance
"""
super(SubscribedTrackInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'sid': payload.get('sid'),
'participant_sid': payload.get('participant_sid'),
'publisher_sid': payload.get('publisher_sid'),
'room_sid': payload.get('room_sid'),
'name': payload.get('name'),
'date_created': deserialize.iso8601_datetime(payload.get('date_created')),
'date_updated': deserialize.iso8601_datetime(payload.get('date_updated')),
'enabled': payload.get('enabled'),
'kind': payload.get('kind'),
'url': payload.get('url'),
}
# Context
self._context = None
self._solution = {
'room_sid': room_sid,
'participant_sid': participant_sid,
'sid': sid or self._properties['sid'],
}
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: SubscribedTrackContext for this SubscribedTrackInstance
:rtype: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackContext
"""
if self._context is None:
self._context = SubscribedTrackContext(
self._version,
room_sid=self._solution['room_sid'],
participant_sid=self._solution['participant_sid'],
sid=self._solution['sid'],
)
return self._context
@property
def sid(self):
"""
:returns: The unique string that identifies the resource
:rtype: unicode
"""
return self._properties['sid']
@property
def participant_sid(self):
"""
:returns: The SID of the participant that subscribes to the track
:rtype: unicode
"""
return self._properties['participant_sid']
@property
def publisher_sid(self):
"""
:returns: The SID of the participant that publishes the track
:rtype: unicode
"""
return self._properties['publisher_sid']
@property
def room_sid(self):
"""
:returns: The SID of the room where the track is published
:rtype: unicode
"""
return self._properties['room_sid']
@property
def name(self):
"""
:returns: The track name
:rtype: unicode
"""
return self._properties['name']
@property
def date_created(self):
"""
:returns: The ISO 8601 date and time in GMT when the resource was created
:rtype: datetime
"""
return self._properties['date_created']
@property
def date_updated(self):
"""
:returns: The ISO 8601 date and time in GMT when the resource was last updated
:rtype: datetime
"""
return self._properties['date_updated']
@property
def enabled(self):
"""
:returns: Whether the track is enabled
:rtype: bool
"""
return self._properties['enabled']
@property
def kind(self):
"""
:returns: The track type
:rtype: SubscribedTrackInstance.Kind
"""
return self._properties['kind']
@property
def url(self):
"""
:returns: The absolute URL of the resource
:rtype: unicode
"""
return self._properties['url']
def fetch(self):
"""
Fetch a SubscribedTrackInstance
:returns: Fetched SubscribedTrackInstance
:rtype: twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackInstance
"""
return self._proxy.fetch()
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Video.V1.SubscribedTrackInstance {}>'.format(context)
| tysonholub/twilio-python | twilio/rest/video/v1/room/room_participant/room_participant_subscribed_track.py | Python | mit | 15,072 |
#!/usr/bin/env python
# Copyright 2015 Bryce Schroeder, www.bryce.pw, bryce.schroeder@gmail.com
# Wiki: http://www.ferazelhosting.net/wiki/delv
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Please do not make trouble for me or the Technical Documentation Project by
# using this software to create versions of the "Cythera Data" file which
# have bypassed registration checks.
# Also, remember that the "Cythera Data" file is copyrighted by Ambrosia and
# /or Glenn Andreas, and publishing modified versions without their permission
# would violate that copyright.
#
# "Cythera" and "Delver" are trademarks of either Glenn Andreas or
# Ambrosia Software, Inc.
# This file addresses sundry storage types used within Delver Archives,
# and as such is mostly a helper for other parts of delv.
import delv.util, delv.archive, delv.store, delv.library
import delv.colormap, delv.level
import editors
import cStringIO as StringIO
import gtk
import operator, re
class SearchCriterion(object):
def __init__(self, mode, accessor):
# Yeah, this is kinda hokey, maybe it should use a parser?
# or have drop down menus to pick these
self.accessor = accessor
        # has to be first since REs have funny characters :D
self.remode = '/' in mode
if self.remode:
try:
self.re = re.compile(mode[1:])
except Exception,e:
# would like an error message but we'd have to give up on
# live updates of the search window...
print repr(e)#self.error_message(
self.re = None
return
self.stringmode = '"' in mode
if self.stringmode:
self.query = mode.replace('"','')
return
ops = [('>=', operator.ge), ('<=', operator.le),
('>', operator.gt), ('<', operator.lt),
('!=',operator.ne),
('!&',(lambda a,b: not (a & b))), ('&', operator.and_)]
self.op = operator.eq
self.mask = 0xFFFFFFFF
self.offset = 0
if '#' in mode:
self.mask=0x0000FFFF
self.offset = 0
if '@' in mode:
self.mask=0x0000FFFF
self.offset = 0x100
for tok,op in ops:
if tok in mode:
self.op = op
mode = mode.replace(tok,'')
break
self.operand = self.parse_int(mode)
def parse_int(self,mode):
base = 10
mask = 0xFFFFFFFF
if '@' in mode:
mode = mode.replace('@','')
if '#' in mode:
mode = mode.replace('#','')
if '0x' in mode:
base = 16
mode = mode.replace('0x','')
if ',' in mode:
x,y = mode.split(',')
x,y = int(x,base), int(y,base)
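            # Map coordinates are packed into a single word as (x<<12)|y, the
            # same encoding editor_callback_location uses for prop locations.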
return (x<<12)|y
#mask=0x000000FFFF
return int(mode,base)
else:
return int(mode,base)
def evaluate(self, thing):
value = self.accessor(thing)
if self.remode:
if not self.re: return None
return self.re.search(value) is not None
if self.stringmode:
return self.query in value
if isinstance(value, str):
value=self.parse_int(value.strip().split()[0])
if self.mask != 0xFFFFFFFF: value -= self.offset
return self.op(value&self.mask, self.operand&self.mask)
class PropListEditor(editors.Editor):
name = "Prop List Editor [nothing opened]"
default_size = 800,600
co_object = delv.level.PropList
def gui_setup(self):
pbox = gtk.VBox(False,0)
self.search_criteria = {}
self.set_default_size(*self.default_size)
menu_items = (
("/File/Save Resource", "<control>S", self.file_save, 0, None),
("/File/Revert", None, self.revert, 0, None),
#("/File/Export CSV", None, self.export_csv, 0, None),
#("/File/Import CSV", None, self.import_csv, 0, None),
#("/Edit/Cut", "<control>X", None, 0, None),
#("/Edit/Copy", "<control>C", None, 0, None),
#("/Edit/Paste", "<control>V", None, 0, None),
#("/Edit/Delete", None, None, 0, None),
("/Edit/Insert Entry", "<control>N", self.edit_insert,0,None),
("/Edit/Select All", "<control>A", self.select_all,0,None),
("/Map/Open Map", "<control>M", self.open_map, 0, None),
("/Map/Send Selection to Map", "<alt>S",
self.send_selection, 0, None),
("/Map/Reload Map", "<alt>R", self.reload_map, 0, None),
#("/Select/Container Contents", "<control>O",None,0,None),
#("/Select/Others in Cell","<control>T",None,0,None),
#("/Select/Parent","<control>P",None,0,None),
#("/Select/Scroll to Selected","<control>F",None,0,None),
)
accel = gtk.AccelGroup()
ifc = gtk.ItemFactory(gtk.MenuBar, "<main>", accel)
self.add_accel_group(accel)
ifc.create_items(menu_items)
self.menu_bar = ifc.get_widget("<main>")
pbox.pack_start(self.menu_bar, False, True, 0)
sw = gtk.ScrolledWindow()
sw.set_policy(gtk.POLICY_AUTOMATIC,gtk.POLICY_AUTOMATIC)
self.data_view = gtk.TreeView()
self.data_view.get_selection().set_mode(gtk.SELECTION_MULTIPLE)
dc = gtk.TreeViewColumn()
dc.set_title("Index")
c = gtk.CellRendererText()
c.set_property('editable',False)
dc.pack_start(c,True)
dc.add_attribute(c,"text",0)
self.data_view.append_column(dc)
dc = gtk.TreeViewColumn()
dc.set_title("Flags")
c = gtk.CellRendererText()
c.set_property('editable',True)
c.connect('edited', self.editor_callback_flags)
dc.pack_start(c,True)
dc.add_attribute(c,"text",1)
self.data_view.append_column(dc)
dc = gtk.TreeViewColumn()
dc.set_title("Free")
c = gtk.CellRendererToggle()
#c.connect('toggled', self.editor_callback_free)
dc.pack_start(c,True)
dc.add_attribute(c,"active",12)
self.data_view.append_column(dc)
dc = gtk.TreeViewColumn()
dc.set_title("Prop Type")
c = gtk.CellRendererText()
c.set_property('editable',True)
c.connect('edited', self.editor_callback_proptype)
dc.pack_start(c,True)
dc.add_attribute(c,"text",2)
self.data_view.append_column(dc)
dc = gtk.TreeViewColumn()
dc.set_title("Location")
c = gtk.CellRendererText()
c.set_property('editable',True)
c.connect('edited', self.editor_callback_location)
dc.pack_start(c,True)
dc.add_attribute(c,"text",3)
self.data_view.append_column(dc)
dc = gtk.TreeViewColumn()
dc.set_title("Rotate?")
c = gtk.CellRendererToggle()
#c.set_activatable(True)
#c.connect('toggled', self.editor_callback_rotate)
dc.pack_start(c,True)
dc.add_attribute(c,"active",4)
self.data_view.append_column(dc)
dc = gtk.TreeViewColumn()
dc.set_title("Aspect")
c = gtk.CellRendererText()
c.set_property('editable',True)
c.connect('edited', self.editor_callback_aspect)
dc.pack_start(c,True)
dc.add_attribute(c,"text",5)
self.data_view.append_column(dc)
dc = gtk.TreeViewColumn()
dc.set_title("D1")
c = gtk.CellRendererText()
c.set_property('editable',True)
c.connect('edited', self.editor_callback_d1)
dc.pack_start(c,True)
dc.add_attribute(c,"text",6)
self.data_view.append_column(dc)
dc = gtk.TreeViewColumn()
dc.set_title("D2")
c = gtk.CellRendererText()
c.set_property('editable',True)
c.connect('edited', self.editor_callback_d2)
dc.pack_start(c,True)
dc.add_attribute(c,"text",7)
self.data_view.append_column(dc)
dc = gtk.TreeViewColumn()
dc.set_title("D3")
c = gtk.CellRendererText()
c.set_property('editable',True)
c.connect('edited', self.editor_callback_d3)
dc.pack_start(c,True)
dc.add_attribute(c,"text",8)
self.data_view.append_column(dc)
dc = gtk.TreeViewColumn()
dc.set_title("Prop Reference")
c = gtk.CellRendererText()
c.set_property('editable',True)
c.connect('edited', self.editor_callback_propref)
dc.pack_start(c,True)
dc.add_attribute(c,"text",9)
self.data_view.append_column(dc)
dc = gtk.TreeViewColumn()
dc.set_title("Storage")
c = gtk.CellRendererText()
c.set_property('editable',True)
c.connect('edited', self.editor_callback_storage)
dc.pack_start(c,True)
dc.add_attribute(c,"text",10)
self.data_view.append_column(dc)
dc = gtk.TreeViewColumn()
dc.set_title("Unknown")
c = gtk.CellRendererText()
c.set_property('editable',False)
#c.connect('edited', self.editor_callback_u)
dc.pack_start(c,True)
dc.add_attribute(c,"text",13)
self.data_view.append_column(dc)
sw.add(self.data_view)
pbox.pack_start(sw, True, True, 5)
hbox = gtk.HBox(False,0)
#hbox.pack_start(gtk.Label("Search:"))
hbox.pack_start(gtk.Label("Search by... Index:"))
self.search_index = gtk.Entry()
self.search_index.set_width_chars(6)
self.search_index.connect("changed", self.criterion_change,
(lambda i: self.tree_data.get_value(i, 11)))
hbox.pack_start(self.search_index)
hbox.pack_start(gtk.Label("Flags:"))
self.search_flags = gtk.Entry()
self.search_flags.set_width_chars(6)
self.search_flags.connect("changed", self.criterion_change,
(lambda i: self.tree_data.get_value(i, 1)))
hbox.pack_start(self.search_flags)
hbox.pack_start(gtk.Label("PropType:"))
self.search_proptype = gtk.Entry()
self.search_proptype.set_width_chars(12)
self.search_proptype.connect("changed", self.criterion_change,
(lambda i: self.tree_data.get_value(i, 2)))
hbox.pack_start(self.search_proptype)
hbox.pack_start(gtk.Label("Location:"))
self.search_location = gtk.Entry()
self.search_location.set_width_chars(8)
self.search_location.connect("changed", self.criterion_change,
(lambda i: self.tree_data.get_value(i, 3)))
hbox.pack_start(self.search_location)
hbox.pack_start(gtk.Label("Aspect:"))
self.search_aspect = gtk.Entry()
self.search_aspect.set_width_chars(4)
self.search_aspect.connect("changed", self.criterion_change,
(lambda i: self.tree_data.get_value(i, 5)))
hbox.pack_start(self.search_aspect)
hbox.pack_start(gtk.Label("d1:"))
self.search_d1 = gtk.Entry()
self.search_d1.set_width_chars(6)
self.search_d1.connect("changed", self.criterion_change,
(lambda i: self.tree_data.get_value(i, 6)))
hbox.pack_start(self.search_d1)
hbox.pack_start(gtk.Label("d2:"))
self.search_d2 = gtk.Entry()
self.search_d2.set_width_chars(6)
self.search_d2.connect("changed", self.criterion_change,
(lambda i: self.tree_data.get_value(i, 7)))
hbox.pack_start(self.search_d2)
hbox.pack_start(gtk.Label("d3:"))
self.search_d3 = gtk.Entry()
self.search_d3.set_width_chars(8)
self.search_d3.connect("changed", self.criterion_change,
(lambda i: self.tree_data.get_value(i, 8)))
hbox.pack_start(self.search_d3)
#self.searchbutton = gtk.Button("Search")
#hbox.pack_start(self.searchbutton)
#self.showall = gtk.Button("Show All")
#hbox.pack_start(self.showall)
pbox.pack_start(hbox, False, True, 0)
self.add(pbox)
def file_save(self, *argv):
self.props.empty()
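        # Rebuild the PropList from the tree model; numeric columns are held
        # as "0x..." display strings, hence the [2:] slices before int(..., 16).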
itr = self.tree_data.get_iter_first()
while itr:
self.props.append(delv.level.PropListEntry(
flags=int(self.tree_data.get_value(itr, 1)[2:],16),
loc=int(self.tree_data.get_value(itr, 3)[2:8],16),
aspect=int(self.tree_data.get_value(itr, 5))|(
0x20 if self.tree_data.get_value(itr, 4) else 0x00),
proptype=int(self.tree_data.get_value(itr, 2)[2:5],16),
d3=int(self.tree_data.get_value(itr, 8)[2:],16),
propref=int(self.tree_data.get_value(itr, 9)[2:],16),
storeref=int(self.tree_data.get_value(itr, 10)[2:],16),
u=int(self.tree_data.get_value(itr, 13)[2:],16),
))
itr = self.tree_data.iter_next(itr)
self.res.set_data(self.props.get_data())
self.set_saved()
self.redelv.set_unsaved()
def load(self,*argv):
if self.res.subindex == 128:
self.lmap = self.library.get_object(self.res.resid - 0x0100)
else: self.lmap = None
self.props = self.canonical_object
self.tree_data = gtk.ListStore(str,str,str,str,bool,str,
str,str,str,str,str,int,bool,str)
self.tree_filter = self.tree_data.filter_new()
self.tree_filter.set_visible_func(self.search_filter)
self.data_view.set_model(self.tree_filter)
for idx,prop in enumerate(self.props):
self.tree_data.append(["%d"%idx, "0x%02X"%prop.flags,
"0x%03X (%s)"%(prop.proptype,prop.get_name(self.library)),
prop.textual_location(),
prop.rotated, "%d"%prop.aspect, "%d"%prop.get_d1(),
"%d"%prop.get_d2(), "0x%04X"%prop.get_d3(),
"0x%04X"%prop.propref, "0x%08X"%prop.storeref, idx,
prop.okay_to_take(),"0x%04X"%prop.u
])
def criterion_change(self, entry, accessor):
new_text = entry.get_text()
if not new_text:
del self.search_criteria[accessor]
else:
try:
self.search_criteria[accessor] = SearchCriterion(
new_text, accessor)
except: pass
self.tree_filter.refilter()
def search_filter(self, model, itr, *argv):
for criterion in self.search_criteria.values():
if not criterion.evaluate(itr): return False
return True
def editor_callback_location(self, renderer, path, new_text):
itr = self.tree_data.get_iter(
self.tree_filter.convert_path_to_child_path(path))
new_text = new_text.replace('(','').replace(')','').strip()
oldloc = int(self.tree_data.get_value(itr,3)[2:8],16)
try:
base = 10
if '0x' in new_text or '$' in new_text:
base = 16
new_text = new_text.replace('$','').replace('0x','')
if ',' in new_text:
x,y = new_text.split(',')
x,y = int(x,base),int(y,base)
rloc =(x<<12)|y
elif '@' in new_text:
new_text = new_text.replace('@','')
container = int(new_text.strip().split()[0],base)
rloc = (oldloc&0xFF0000)|(container+0x100)
elif '#' in new_text:
new_text = new_text.replace('#','')
container = int(new_text.strip().split()[0],base)
rloc = (oldloc&0xFF0000)|container
else:
rloc = int(new_text.strip().split()[0],base)
except: return
flags = int(self.tree_data.get_value(itr,1)[2:],16)
self.tree_data.set_value(itr, 3,
delv.level.textual_location(flags, rloc))
self.set_unsaved()
def editor_callback_flags(self, renderer, path, new_text):
try:
ival = int(new_text.replace('0x','').replace('$',''), 16)
except:
return
if ival < 0 or ival > 255: return
# This is just hideous... maybe not worth it to have save/revert
# as usual pygtk not making it any nicer either
itr = self.tree_data.get_iter(
self.tree_filter.convert_path_to_child_path(path))
loc = int(self.tree_data.get_value(itr,3)[2:8],16)
proptype = int(self.tree_data.get_value(itr,2)[2:5],16)
aspect = int(self.tree_data.get_value(itr,5))
self.tree_data.set_value(itr, 1, "0x%02X"%ival)
self.tree_data.set_value(itr, 12, ival&0x01)
self.tree_data.set_value(itr, 3,
delv.level.textual_location(ival, loc))
self.tree_data.set_value(itr, 2,
"0x%03X (%s)"%(proptype, delv.level.proptypename_with_flags(
ival, proptype, aspect, self.redelv.get_library())))
self.set_unsaved()
#def editor_callback_free(self, renderer, path, new_text):
    #    itr = self.tree_data.get_iter(path)
def editor_callback_aspect(self, renderer, path, new_text):
try:
aspect = int(new_text.strip())
except: return
if aspect < 0 or aspect > 31: return
itr = self.tree_data.get_iter(
self.tree_filter.convert_path_to_child_path(path))
proptype = int(self.tree_data.get_value(itr,2)[2:5],16)
flags = int(self.tree_data.get_value(itr,1)[2:],16)
self.tree_data.set_value(itr, 2,
"0x%03X (%s)"%(proptype, delv.level.proptypename_with_flags(
flags, proptype, aspect, self.redelv.get_library())))
self.tree_data.set_value(itr, 5,
"%d"%aspect)
self.set_unsaved()
def editor_callback_d1(self , renderer, path, new_text):
try:
if '0x' in new_text or '$' in new_text:
d1 = int(new_text.strip().split()[0].replace(
'0x','').replace('$',''), 16)
else:
d1 = int(new_text.strip().split()[0])
except Exception,e:
print repr(e)
return
if d1 < 0 or d1 > 255: return
itr = self.tree_data.get_iter(
self.tree_filter.convert_path_to_child_path(path))
d3 = int(self.tree_data.get_value(itr, 8)[2:],16)
d3 &= 0x00FF
d3 |= (d1<<8)
self.tree_data.set_value(itr, 6, "%d"%d1)
self.tree_data.set_value(itr, 8, "0x%04X"%d3)
def editor_callback_d2(self , renderer, path, new_text):
try:
if '0x' in new_text or '$' in new_text:
d2 = int(new_text.strip().split()[0].replace(
'0x','').replace('$',''), 16)
else:
d2 = int(new_text.strip().split()[0])
except Exception,e:
print repr(e)
return
if d2 < 0 or d2 > 255: return
itr = self.tree_data.get_iter(
self.tree_filter.convert_path_to_child_path(path))
d3 = int(self.tree_data.get_value(itr, 8)[2:],16)
d3 &= 0xFF00
d3 |= d2
self.tree_data.set_value(itr, 7, "%d"%d2)
self.tree_data.set_value(itr, 8, "0x%04X"%d3)
def editor_callback_d3(self , renderer, path, new_text):
try:
if '0x' in new_text or '$' in new_text:
d3 = int(new_text.strip().split()[0].replace(
'0x','').replace('$',''), 16)
else:
d3 = int(new_text.strip().split()[0])
except Exception,e:
print repr(e)
return
if d3 < 0 or d3 > 0xFFFF: return
itr = self.tree_data.get_iter(
self.tree_filter.convert_path_to_child_path(path))
self.tree_data.set_value(itr, 6, "%d"%(d3>>8) )
self.tree_data.set_value(itr, 7, "%d"%(d3&0x00FF))
self.tree_data.set_value(itr, 8, "0x%04X"%d3)
def editor_callback_proptype(self, renderer, path, new_text):
try:
proptype = int(new_text.split()[0].replace(
'0x','').replace('$',''), 16)
except: return
if proptype < 0 or proptype > 0x3FF: return
itr = self.tree_data.get_iter(
self.tree_filter.convert_path_to_child_path(path))
aspect = int(self.tree_data.get_value(itr,5))
flags = int(self.tree_data.get_value(itr,1)[2:],16)
self.tree_data.set_value(itr, 2,
"0x%03X (%s)"%(proptype, delv.level.proptypename_with_flags(
flags, proptype, aspect, self.redelv.get_library())))
self.set_unsaved()
def editor_callback_storage(self, renderer, path, new_text):
try:
storeref = int(new_text.strip().split()[0].replace(
'0x','').replace('$',''), 16)
except: return
if storeref < 0 or storeref > 0xFFFF: return
itr = self.tree_data.get_iter(
self.tree_filter.convert_path_to_child_path(path))
self.tree_data.set_value(itr, 10,"0x%08X"%storeref)
self.set_unsaved()
def editor_callback_propref(self, renderer, path, new_text):
try:
propref = int(new_text.strip().split()[0].replace(
'0x','').replace('$',''), 16)
except: return
        if propref < 0 or propref > 0xFFFF: return
itr = self.tree_data.get_iter(
self.tree_filter.convert_path_to_child_path(path))
self.tree_data.set_value(itr, 9,"0x%04X"%propref)
self.set_unsaved()
def editor_setup(self):
self.set_title("Prop List Editor [%04X]"%self.res.resid)
self.map_editor = None
self.library = self.redelv.get_library()
self.load()
def edit_insert(self, *argv):
idx = len(self.tree_data)
self.tree_data.append(["%d"%idx, "0xFF", "0x000", "0x000000",
False, "0", "0","0","0x0000", "0x00000000", "0x0000", idx, True,
"0x0000"
])
def cleanup(self):
if self.map_editor: self.map_editor.prop_editor = None
def open_map(self, *argv):
if self.map_editor:
self.map_editor.show_all()
self.map_editor.present()
else:
self.map_editor = self.redelv.open_editor(self.res.resid-0x0100)
self.map_editor.marry(self)
self.map_editor.prop_editor = self
def reload_map(self, *argv):
if not self.map_editor: self.open_map()
self.map_editor.revert()
def send_selection(self, *argv):
if not self.map_editor: self.open_map()
tm,paths = self.data_view.get_selection().get_selected_rows()
selected = [
tm.get_value(tm.get_iter(path),11) for path in paths]
self.map_editor.change_selection(selected)
self.map_editor.scroll_to_selection()
def select_all(self,*argv):
self.data_view.get_selection().select_all()
def select_props_by_index(self, selection):
tsel = self.data_view.get_selection()
tsel.unselect_all()
if not selection: return
itr = self.tree_data.get_iter_first()
while itr:
index = self.tree_data.get_value(itr, 11)
# you see what I have to put up with:
if index in selection:
try:
pth = self.tree_filter.convert_child_path_to_path(
self.tree_data.get_path(itr))
tsel.select_path(pth)
self.data_view.scroll_to_cell(pth)
except TypeError: pass
itr = self.tree_data.iter_next(itr)
# Note to self, make this support general tile layers
# Also, abstract the tile drawing so the code can be reused in something
# other than pygtk
# Idea: change palette to show selection
class MapEditor(editors.Editor):
name = "Map Editor [nothing opened]"
default_size = 800,600
co_object = delv.level.Map
def gui_setup(self):
self.mouse_position = 0,0
pbox = gtk.VBox(False,0)
self.set_default_size(*self.default_size)
menu_items = (
("/File/Save Resource", "<control>S", None, 0, None),
("/File/Revert", None, self.revert, 0, None),
("/File/Export Image", None, self.export_img, 0, None),
("/Edit/Cut", "<control>X", None, 0, None),
("/Edit/Copy", "<control>C", None, 0, None),
("/Edit/Paste", "<control>V", None, 0, None),
("/Edit/Clear", None, None, 0, None),
("/Tool/Cursor", "C", None, 0, None),
("/Tool/Pencil", "N", None, 0, None),
("/Tool/Brush", "B", None, 0, None),
("/Tool/Rectangle Select", "R", None, 0, None),
("/Tool/Tile Select", "T", self.tile_select, 0, None),
("/Tool/Stamp", "M", None, 0, None),
("/Tool/Eyedropper", "E", None, 0, None),
("/Tool/Prop Select", "P", self.prop_select, 0, None),
("/Tool/Select None", "X", self.select_none, 0, None),
("/View/Preview Palette Animation",None,None, 0, None),
("/View/Display Roof Layer",None, None, 0, None),
("/View/Display Props", None, None, 0, None),
("/View/Send Selection to Prop List","<alt>S",self.send_selection,
0,None),
("/View/Scroll to Selection","L",self.scroll_to_selection,0,None),
("/Windows/Tile Selector", "<control>T", None, 0, None),
("/Windows/Props List", "<control>P", self.open_props, 0, None),
("/Windows/Brushes", "<control>B", None, 0, None),
("/Windows/Stamps", "<control>M", None, 0, None),
("/Windows/Map Boundaries", None, None, 0, None),
)
accel = gtk.AccelGroup()
ifc = gtk.ItemFactory(gtk.MenuBar, "<main>", accel)
self.add_accel_group(accel)
ifc.create_items(menu_items)
self.menu_bar = ifc.get_widget("<main>")
pbox.pack_start(self.menu_bar, False, True, 0)
sw = gtk.ScrolledWindow()
sw.set_policy(gtk.POLICY_AUTOMATIC,gtk.POLICY_AUTOMATIC)
self.display = gtk.Image()
self.eventbox = gtk.EventBox()
self.eventbox.add_events(
gtk.gdk.POINTER_MOTION_MASK|gtk.gdk.BUTTON_PRESS)
self.eventbox.connect("motion-notify-event", self.mouse_movement)
self.eventbox.connect("button-press-event", self.mouse_click)
self.eventbox.add(self.display)
self.sbox = gtk.Fixed()
self.sbox.put(self.eventbox, 0,0)
sw.add_with_viewport(self.sbox)
pbox.pack_start(sw, True, True, 0)
self.sw = sw
hbox = gtk.HBox(False,0)
hbox.pack_start(gtk.Label("Cursor:"),False,True,0)
self.w_xpos = gtk.Entry()
self.w_xpos.set_width_chars(4)
self.w_xpos.set_editable(False)
hbox.pack_start(self.w_xpos,False,True,0)
self.w_ypos = gtk.Entry()
self.w_ypos.set_width_chars(4)
self.w_ypos.set_editable(False)
hbox.pack_start(self.w_ypos,False,True,0)
hbox.pack_start(gtk.Label("Map Data:"),False,True,0)
self.w_mapdata = gtk.Entry()
self.w_mapdata.set_width_chars(6)
self.w_mapdata.set_editable(False)
hbox.pack_start(self.w_mapdata,False,True,0)
hbox.pack_start(gtk.Label("Name:"),False,True,0)
self.w_name = gtk.Entry()
self.w_name.set_editable(False)
hbox.pack_start(self.w_name,False,True,0)
hbox.pack_start(gtk.Label("Attr:"),False,True,0)
self.w_attr = gtk.Entry()
self.w_attr.set_width_chars(10)
self.w_attr.set_editable(False)
hbox.pack_start(self.w_attr,True, True, 0)
hbox.pack_start(gtk.Label("Faux Prop:"),False,True,0)
self.w_faux = gtk.Entry()
self.w_faux.set_width_chars(9)
self.w_faux.set_editable(False)
hbox.pack_start(self.w_faux,True, True, 0)
hbox.pack_start(gtk.Label("FP Tile:"),False,True,0)
self.w_fauxtile = gtk.Entry()
self.w_fauxtile.set_width_chars(6)
self.w_fauxtile.set_editable(False)
hbox.pack_start(self.w_fauxtile,True, True, 0)
hbox.pack_start(gtk.Label("FP Attr:"),False,True,0)
self.w_fauxattr = gtk.Entry()
self.w_fauxattr.set_width_chars(10)
self.w_fauxattr.set_editable(False)
hbox.pack_start(self.w_fauxattr,True, True, 0)
hbox.pack_start(gtk.Label("FP Offs:"),False,True,0)
self.w_fauxoffs = gtk.Entry()
self.w_fauxoffs.set_width_chars(5)
self.w_fauxoffs.set_editable(False)
hbox.pack_start(self.w_fauxoffs,True, True, 0)
pbox.pack_start(hbox, False, True, 0)
self.w_props = gtk.Entry()
self.w_props.set_width_chars(80)
self.w_props.set_editable(False)
pbox.pack_start(self.w_props, False, True, 0)
self.add(pbox)
#def set_view(self,x=None,y=None):
# if x is not None: self.view_x = x
# if y is not None: self.view_y = y
#def get_view_rect(self):
# return (self.view_x,self.view_y,
# self.sw.allocation.width,self.sw.allocation.height)
def editor_setup(self):
self.set_title("Map Editor [%04X]"%self.res.resid)
self.prop_editor = None
self.click_tool = None
self.selection = None
self.library = self.redelv.get_library()
self.load()
self.pixmap = gtk.gdk.Pixmap(None,
self.lmap.width*32, self.lmap.height*32,
gtk.gdk.visual_get_system().depth)
print 0,0,self.lmap.width*32, self.lmap.height*32
self.gc = self.pixmap.new_gc(function=gtk.gdk.COPY)
self.gc.set_foreground(gtk.gdk.Color(pixel=0x00000000))
#self.gc.set_background(gtk.gdk.Color(255,0,0))
self.pixmap.draw_rectangle(self.gc, True,
0,0,self.lmap.width*32,self.lmap.height*32)
#self.view_rect=0,0,self.sw.allocation.width,self.sw.allocation.height
self.draw_map()
def draw_tile(self, x, y, tid, pal=delv.colormap.rgb24, as_prop=False,
offset=(0,0),rotated=False,inhibit=False):
if not tid: return
tile = self.library.get_tile(tid)
xo,yo = offset[::-1] if rotated else offset
attr = tile.attributes
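        # Attribute bits 0x40/0x80 mark prop artwork that spans more than one
        # map cell; the companion tiles are drawn first, with inhibit=True so
        # that they don't recurse in turn.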
if not inhibit:
            if rotated: # refactor this now that we understand how it works FIXME
if as_prop and attr & 0x00000C0 == 0x40:
self.draw_tile(x-1,y, tile.index-1, pal=pal,as_prop=True,
offset=offset,rotated=rotated,inhibit=True)
elif as_prop and attr & 0x00000C0 == 0x80:
self.draw_tile(x,y-1, tile.index-1, pal=pal,as_prop=True,
offset=offset,rotated=rotated,inhibit=True)
elif as_prop and attr & 0x00000C0 == 0xC0:
self.draw_tile(x-1,y-1, tile.index-3, pal=pal,as_prop=True,
offset=offset,rotated=rotated,inhibit=True)
self.draw_tile(x-1,y, tile.index-2, pal=pal,as_prop=True,
offset=offset,rotated=rotated,inhibit=True)
self.draw_tile(x,y-1, tile.index-1, pal=pal,as_prop=True,
offset=offset,rotated=rotated,inhibit=True)
else:
if as_prop and attr & 0x00000C0 == 0x40:
self.draw_tile(x,y-1, tile.index-1, pal=pal,as_prop=True,
offset=offset,rotated=rotated,inhibit=True)
elif as_prop and attr & 0x00000C0 == 0x80:
self.draw_tile(x-1,y, tile.index-1, pal=pal,as_prop=True,
offset=offset,rotated=rotated,inhibit=True)
elif as_prop and attr & 0x00000C0 == 0xC0:
self.draw_tile(x-1,y-1, tile.index-3, pal=pal,as_prop=True,
offset=offset,rotated=rotated,inhibit=True)
self.draw_tile(x,y-1, tile.index-2, pal=pal,as_prop=True,
offset=offset,rotated=rotated,inhibit=True)
self.draw_tile(x-1,y, tile.index-1, pal=pal,as_prop=True,
offset=offset,rotated=rotated,inhibit=True)
if tile.requires_mask or as_prop:
self.gc.set_clip_origin(x*32-xo, y*32-yo)
self.gc.set_clip_mask(tile.get_pixmap_mask(gtk,rotated))
else:
self.gc.set_clip_mask(None)
self.pixmap.draw_indexed_image(self.gc, x*32-xo, y*32-yo, 32, 32,
gtk.gdk.RGB_DITHER_NORMAL, tile.get_image(rotated),
32, pal)
if tile.fauxprop:
fauxprop = self.library.get_prop(tile.fauxprop)
fptile = fauxprop.get_tile(tile.fauxprop_aspect)
self.draw_tile(x, y, fptile, pal=pal,as_prop=True,
offset=fauxprop.get_offset(tile.fauxprop_aspect),
rotated=tile.fauxprop_rotate)
# FIXME needs to incorporate faux props into the prop list and draw
# them under the same priority system as listed props
def draw_map(self,stop=None):
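        # Two passes: ground tiles first, then props per cell (visible ones
        # sorted by draw priority, invisible ones as debug tiles); finally the
        # current selection is redrawn with the "selected" palette.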
for y in xrange(stop[1] if stop else self.lmap.height):
for x in xrange(stop[0] if stop else self.lmap.width):
self.draw_tile(x,y,self.lmap.map_data[x+y*self.lmap.width])
for y in xrange(self.lmap.height):
for x in xrange(self.lmap.width):
if not self.props: continue
prpat = self.props.props_at((x,y))
visible = filter(lambda r:r.show_in_map(), prpat)[::-1]
visible.sort(key=(lambda p: self.library.get_tile(
self.library.get_prop(p.proptype).get_tile(
p.aspect)).draw_priority()))
for p in visible:
x,y = p.get_loc()
proptype = self.library.get_prop(p.proptype)
proptile = proptype.get_tile(p.aspect)
self.draw_tile(x,y,proptile,
offset=proptype.get_offset(p.aspect), as_prop=True,
rotated=p.rotated)
# draw invisible props
invisible = filter(lambda r: not r.show_in_map(), prpat)
for p in invisible:
x,y = p.get_loc()
proptype = self.library.get_prop(p.proptype)
if p.flags == 0x42:
proptile = proptype.get_debug_tile(p.aspect)
else:
proptile = 0x017F
self.draw_tile(x,y,proptile,
offset=proptype.get_offset(p.aspect), as_prop=True,
rotated=p.rotated)
if isinstance(self.selection, tuple):
x,y = self.selection
self.draw_tile(x,y,
self.lmap.map_data[x+y*self.lmap.width],
pal=delv.colormap.selected_rgb24)
elif self.selection is None:
pass
elif self.props:
for pidx in self.selection:
p = self.props[pidx]
proptype = self.library.get_prop(p.proptype)
if p.show_in_map():
proptile = proptype.get_tile(p.aspect)
else:
proptile = proptype.get_debug_tile(p.aspect)
x,y = p.loc
self.draw_tile(x,y,proptile,
offset=proptype.get_offset(p.aspect), as_prop=True,
rotated=p.rotated, pal=delv.colormap.selected_rgb24)
self.display.set_from_pixmap(self.pixmap,None)
def revert(self, *argv):
#if self.unsaved and not
self.selection = None
self.load()
self.draw_map()
def load(self, *argv):
self.lmap = self.canonical_object
self.props = self.library.get_object(self.res.resid + 0x0100)
def change_selection(self, ns):
if self.selection == ns: return
self.selection = ns
self.draw_map()
def scroll_to_tile(self, x, y):
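        """Scroll the view so tile (x, y) sits near the top-left corner,
        clamping both adjustments to the scrollable range."""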
x = max(0,min(32*(x-2),
self.display.allocation.width-self.sw.allocation.width))
y = max(0,min(32*(y-2),
self.display.allocation.height-self.sw.allocation.height))
self.sw.get_hadjustment().set_value(x)
self.sw.get_vadjustment().set_value(y)
def select_none(self, *argv):
self.change_selection(None)
def tile_select(self, *argv):
self.click_tool = self.tile_select_tool
def tile_select_tool(self, x, y):
self.change_selection((x,y))
def prop_select(self, *argv):
self.click_tool = self.prop_select_tool
def prop_select_tool(self, x, y):
self.change_selection([p.index for p in self.props.props_at((x,y))])
def update_cursor_info(self):
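        """Refresh the status widgets with the raw map value, tile name,
        attributes, faux-prop details and prop list for the tile under the
        mouse cursor."""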
x,y = self.mouse_position
self.w_xpos.set_text(str(x))
self.w_ypos.set_text(str(y))
self.w_mapdata.set_text("0x%04X"%(
self.lmap.map_data[x+y*self.lmap.width]))
tile = self.library.get_tile(self.lmap.get_tile(x,y))
self.w_name.set_text(
tile.get_name())
self.w_attr.set_text(
"0x%08X"%tile.attributes)
self.w_faux.set_text(
"0x%03X:%X:%d"%(tile.fauxprop,tile.fauxprop_aspect,
tile.fauxprop_rotate))
if tile.fauxprop:
fp = self.library.get_prop(tile.fauxprop)
self.w_fauxtile.set_text(
"0x%04X"%(fp.tile))
self.w_fauxattr.set_text(
"0x%08X"%self.library.get_tile(fp.get_tile(
tile.fauxprop_aspect)).attributes)
self.w_fauxoffs.set_text(
"%d,%d"%fp.get_offset(tile.fauxprop_aspect))
else:
self.w_fauxtile.set_text("(NA)")
self.w_fauxattr.set_text("(NA)")
self.w_fauxoffs.set_text("(NA)")
if self.props: p = self.props.props_at((x,y))
else: return
if not p:
self.w_props.set_text("(No props)")
else:
self.w_props.set_text(', '.join(map(
lambda p:p.debug(self.library),p)))
def mouse_movement(self, widget, event):
if event.x is None or event.y is None: return
x,y= widget.translate_coordinates(self.display,
int(event.x),int(event.y))
newp = x//32,y//32
if newp != self.mouse_position:
self.mouse_position = newp
self.update_cursor_info()
def scroll_to_selection(self, *argv):
if isinstance(self.selection, tuple):
self.scroll_to_tile(*self.selection)
elif self.selection is None:
self.scroll_to_tile(0,0)
elif self.props:
for pidx in self.selection:
p = self.props[pidx]
if not p.inside_something():
self.scroll_to_tile(*p.loc)
break
def mouse_click(self, widget, event):
if event.x is None or event.y is None: return
x,y= widget.translate_coordinates(self.display,
int(event.x),int(event.y))
newp = x//32,y//32
if self.click_tool: self.click_tool(*newp)
def export_img(self, *argv):
path = self.ask_save_path(default = "Map%04X.png"%self.res.resid)
if not path: return
if not path.endswith(".png"): path += ".png"
pbuf = self.get_pixbuf_from_pixmap()
pbuf.save(path, "png", {})
def get_pixbuf_from_pixmap(self):
pbuf = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, False, 8,
self.lmap.width*32,self.lmap.height*32)
pbuf.get_from_drawable(self.pixmap, gtk.gdk.colormap_get_system(),
0,0,0,0,self.lmap.width*32,self.lmap.height*32)
return pbuf
def open_props(self, *argv):
if self.prop_editor:
self.prop_editor.show_all()
self.prop_editor.present()
else:
self.prop_editor = self.redelv.open_editor(self.res.resid+0x0100)
self.prop_editor.marry(self)
self.prop_editor.map_editor = self
def cleanup(self):
if self.prop_editor: self.prop_editor.map_editor = None
def send_selection(self, *argv):
        # Only prop selections (lists of indices) can be sent to the prop editor.
        if not isinstance(self.selection, list): return
if not self.prop_editor: self.open_props()
self.prop_editor.select_props_by_index(self.selection)
| BryceSchroeder/delvmod | redelv/redelvlib/level_editors.py | Python | gpl-3.0 | 41,490 |
from django.views.generic import RedirectView
from django.conf.urls import url
from . import views
app_name = 'micropress'
urlpatterns = [
url(r'^$', RedirectView.as_view(url='article/', permanent=False)),
url(r'^article/$', views.ArticleListView.as_view(),
name='article_list'),
url(r'^article/(?P<slug>[-\w]+)/$', views.ArticleDetailView.as_view(),
name='article_detail'),
url(r'^post/$', views.ArticleCreateView.as_view(), name='article_create'),
]
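# A minimal sketch of wiring this app into a project URLconf; the "press/"
# prefix and the surrounding project are assumptions, not part of this app:
#
#     from django.conf.urls import include, url
#
#     urlpatterns = [
#         url(r'^press/', include('micropress.urls')),
#     ]
#
# Because app_name is set above, the included URLs are reversible under the
# "micropress" namespace, e.g. reverse('micropress:article_list').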
| jbradberry/django-micro-press | micropress/urls.py | Python | mit | 487 |
#!/usr/bin/env python
import sys
import shutil
from cdrouter import CDRouter
from cdrouter.configs import Config
if len(sys.argv) < 3:
print('usage: <base_url> <token> [<config-ids>] [<device-ids>] [<package-ids>] [<result-ids>]')
sys.exit(1)
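# Example invocation (placeholder host, token and ids, not a real system):
#   python bulk_export.py https://cdrouter.example.com <api-token> 1,2 "" 10 100,101
# An empty or omitted argument simply leaves that resource type out of the export.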
base = sys.argv[1]
token = sys.argv[2]
config_ids = None
device_ids = None
package_ids = None
result_ids = None
if len(sys.argv) > 3 and len(sys.argv[3]) > 0:
config_ids = [int(x.strip()) for x in sys.argv[3].split(',')]
if len(sys.argv) > 4 and len(sys.argv[4]) > 0:
device_ids = [int(x.strip()) for x in sys.argv[4].split(',')]
if len(sys.argv) > 5 and len(sys.argv[5]) > 0:
package_ids = [int(x.strip()) for x in sys.argv[5].split(',')]
if len(sys.argv) > 6 and len(sys.argv[6]) > 0:
result_ids = [int(x.strip()) for x in sys.argv[6].split(',')]
c = CDRouter(base, token=token)
b, filename = c.exports.bulk_export(
config_ids=config_ids,
device_ids=device_ids,
package_ids=package_ids,
result_ids=result_ids
)
with open(filename, 'wb') as fd:
    shutil.copyfileobj(b, fd)
print(filename)
| qacafe/cdrouter.py | examples/bulk_export.py | Python | mit | 1,089 |
from datetime import date, timedelta
import io
import os
import json
import shutil
import random
import string
import unittest
from unittest import mock
import tempfile
import responses
from sqlalchemy import Column, Table, Unicode, create_engine
from cumulusci.core.exceptions import BulkDataException, TaskOptionsError
from cumulusci.tasks.bulkdata import LoadData
from cumulusci.tasks.bulkdata.step import (
DataOperationResult,
DataOperationJobResult,
DataOperationType,
DataOperationStatus,
DataApi,
)
from cumulusci.tasks.bulkdata.tests.utils import (
_make_task,
FakeBulkAPI,
FakeBulkAPIDmlOperation,
)
from cumulusci.utils import temporary_dir
from cumulusci.tasks.bulkdata.mapping_parser import MappingLookup, MappingStep
from cumulusci.tests.util import (
assert_max_memory_usage,
mock_describe_calls,
)
from cumulusci.utils.backports.py36 import nullcontext
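# Unit tests for the LoadData bulk-data task: option handling, mapping
# initialization, database queries, job-result processing, record types,
# and person-account support.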
class TestLoadData(unittest.TestCase):
mapping_file = "mapping_v1.yml"
@responses.activate
@mock.patch("cumulusci.tasks.bulkdata.load.get_dml_operation")
def test_run(self, dml_mock):
responses.add(
method="GET",
url="https://example.com/services/data/v46.0/query/?q=SELECT+Id+FROM+RecordType+WHERE+SObjectType%3D%27Account%27AND+DeveloperName+%3D+%27HH_Account%27+LIMIT+1",
body=json.dumps({"records": [{"Id": "1"}]}),
status=200,
)
base_path = os.path.dirname(__file__)
db_path = os.path.join(base_path, "testdata.db")
mapping_path = os.path.join(base_path, self.mapping_file)
with temporary_dir() as d:
tmp_db_path = os.path.join(d, "testdata.db")
shutil.copyfile(db_path, tmp_db_path)
task = _make_task(
LoadData,
{
"options": {
"database_url": f"sqlite:///{tmp_db_path}",
"mapping": mapping_path,
}
},
)
task.bulk = mock.Mock()
task.sf = mock.Mock()
step = FakeBulkAPIDmlOperation(
sobject="Contact",
operation=DataOperationType.INSERT,
api_options={},
context=task,
fields=[],
)
dml_mock.return_value = step
step.results = [
DataOperationResult("001000000000000", True, None),
DataOperationResult("003000000000000", True, None),
DataOperationResult("003000000000001", True, None),
]
mock_describe_calls()
task()
assert step.records == [
["TestHousehold", "1"],
["Test", "User", "test@example.com", "001000000000000"],
["Error", "User", "error@example.com", "001000000000000"],
]
with create_engine(task.options["database_url"]).connect() as c:
hh_ids = next(c.execute("SELECT * from households_sf_ids"))
assert hh_ids == ("1", "001000000000000")
def test_run_task__start_step(self):
task = _make_task(
LoadData,
{
"options": {
"database_url": "sqlite://",
"mapping": "mapping.yml",
"start_step": "Insert Contacts",
}
},
)
task._init_db = mock.Mock(return_value=nullcontext())
task._init_mapping = mock.Mock()
task.mapping = {}
task.mapping["Insert Households"] = MappingStep(sf_object="one", fields={})
task.mapping["Insert Contacts"] = MappingStep(sf_object="two", fields={})
task.after_steps = {}
task._execute_step = mock.Mock(
return_value=DataOperationJobResult(DataOperationStatus.SUCCESS, [], 0, 0)
)
task()
task._execute_step.assert_called_once_with(
MappingStep(sf_object="two", fields={})
)
def test_run_task__after_steps(self):
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
task._init_db = mock.Mock(return_value=nullcontext())
task._init_mapping = mock.Mock()
task._expand_mapping = mock.Mock()
task.mapping = {}
task.mapping["Insert Households"] = 1
task.mapping["Insert Contacts"] = 2
households_steps = {}
households_steps["four"] = 4
households_steps["five"] = 5
task.after_steps = {
"Insert Contacts": {"three": 3},
"Insert Households": households_steps,
}
task._execute_step = mock.Mock(
return_value=DataOperationJobResult(DataOperationStatus.SUCCESS, [], 0, 0)
)
task()
task._execute_step.assert_has_calls(
[mock.call(1), mock.call(4), mock.call(5), mock.call(2), mock.call(3)]
)
def test_run_task__after_steps_failure(self):
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
task._init_db = mock.Mock(return_value=nullcontext())
task._init_mapping = mock.Mock()
task._expand_mapping = mock.Mock()
task.mapping = {}
task.mapping["Insert Households"] = 1
task.mapping["Insert Contacts"] = 2
households_steps = {}
households_steps["four"] = 4
households_steps["five"] = 5
task.after_steps = {
"Insert Contacts": {"three": 3},
"Insert Households": households_steps,
}
task._execute_step = mock.Mock(
side_effect=[
DataOperationJobResult(DataOperationStatus.SUCCESS, [], 0, 0),
DataOperationJobResult(DataOperationStatus.JOB_FAILURE, [], 0, 0),
]
)
with self.assertRaises(BulkDataException):
task()
@responses.activate
@mock.patch("cumulusci.tasks.bulkdata.load.get_dml_operation")
def test_run__sql(self, dml_mock):
responses.add(
method="GET",
url="https://example.com/services/data/v46.0/query/?q=SELECT+Id+FROM+RecordType+WHERE+SObjectType%3D%27Account%27AND+DeveloperName+%3D+%27HH_Account%27+LIMIT+1",
body=json.dumps({"records": [{"Id": "1"}]}),
status=200,
)
base_path = os.path.dirname(__file__)
sql_path = os.path.join(base_path, "testdata.sql")
mapping_path = os.path.join(base_path, self.mapping_file)
task = _make_task(
LoadData, {"options": {"sql_path": sql_path, "mapping": mapping_path}}
)
task.bulk = mock.Mock()
task.sf = mock.Mock()
step = FakeBulkAPIDmlOperation(
sobject="Contact",
operation=DataOperationType.INSERT,
api_options={},
context=task,
fields=[],
)
dml_mock.return_value = step
step.results = [
DataOperationResult("001000000000000", True, None),
DataOperationResult("003000000000000", True, None),
DataOperationResult("003000000000001", True, None),
]
mock_describe_calls()
task()
assert step.records == [
["TestHousehold", "1"],
["Test☃", "User", "test@example.com", "001000000000000"],
["Error", "User", "error@example.com", "001000000000000"],
]
def test_init_options__missing_input(self):
with self.assertRaises(TaskOptionsError):
_make_task(LoadData, {"options": {}})
def test_init_options__bulk_mode(self):
t = _make_task(
LoadData,
{
"options": {
"database_url": "file:///test.db",
"mapping": "mapping.yml",
"bulk_mode": "Serial",
}
},
)
assert t.bulk_mode == "Serial"
t = _make_task(
LoadData,
{"options": {"database_url": "file:///test.db", "mapping": "mapping.yml"}},
)
assert t.bulk_mode is None
def test_init_options__bulk_mode_wrong(self):
with self.assertRaises(TaskOptionsError):
_make_task(LoadData, {"options": {"bulk_mode": "Test"}})
def test_init_options__database_url(self):
t = _make_task(
LoadData,
{"options": {"database_url": "file:///test.db", "mapping": "mapping.yml"}},
)
assert t.options["database_url"] == "file:///test.db"
assert t.options["sql_path"] is None
def test_init_options__sql_path(self):
t = _make_task(
LoadData, {"options": {"sql_path": "test.sql", "mapping": "mapping.yml"}}
)
assert t.options["sql_path"] == "test.sql"
assert t.options["database_url"] is None
@mock.patch("cumulusci.tasks.bulkdata.load.validate_and_inject_mapping")
def test_init_mapping_passes_options_to_validate(self, validate_and_inject_mapping):
base_path = os.path.dirname(__file__)
t = _make_task(
LoadData,
{
"options": {
"sql_path": "test.sql",
"mapping": os.path.join(base_path, self.mapping_file),
"inject_namespaces": True,
"drop_missing_schema": True,
}
},
)
t._init_mapping()
validate_and_inject_mapping.assert_called_once_with(
mapping=t.mapping,
org_config=t.org_config,
namespace=t.project_config.project__package__namespace,
data_operation=DataOperationType.INSERT,
inject_namespaces=True,
drop_missing=True,
)
@responses.activate
def test_expand_mapping_creates_after_steps(self):
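        """Lookups that point at rows loaded in the same or a later step (for
        example Account.ParentId or Contact.ReportsToId) cannot be populated
        on insert, so _expand_mapping should emit extra "Update ...
        Dependencies" steps that run after the corresponding inserts."""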
base_path = os.path.dirname(__file__)
mapping_path = os.path.join(base_path, "mapping_after.yml")
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": mapping_path}},
)
mock_describe_calls()
task._init_mapping()
model = mock.Mock()
model.__table__ = mock.Mock()
model.__table__.primary_key.columns.keys.return_value = ["sf_id"]
task.models = {"accounts": model, "contacts": model}
task._expand_mapping()
self.assertEqual({}, task.after_steps["Insert Opportunities"])
self.assertEqual(
[
"Update Account Dependencies After Insert Contacts",
"Update Contact Dependencies After Insert Contacts",
],
list(task.after_steps["Insert Contacts"].keys()),
)
lookups = {}
lookups["Id"] = MappingLookup(name="Id", table="accounts", key_field="sf_id")
lookups["Primary_Contact__c"] = MappingLookup(
table="contacts", name="Primary_Contact__c"
)
self.assertEqual(
MappingStep(
sf_object="Account",
api=DataApi.BULK,
action=DataOperationType.UPDATE,
table="accounts",
lookups=lookups,
fields={},
),
task.after_steps["Insert Contacts"][
"Update Account Dependencies After Insert Contacts"
],
)
lookups = {}
lookups["Id"] = MappingLookup(name="Id", table="contacts", key_field="sf_id")
lookups["ReportsToId"] = MappingLookup(table="contacts", name="ReportsToId")
self.assertEqual(
MappingStep(
sf_object="Contact",
api=DataApi.BULK,
action=DataOperationType.UPDATE,
table="contacts",
fields={},
lookups=lookups,
),
task.after_steps["Insert Contacts"][
"Update Contact Dependencies After Insert Contacts"
],
)
self.assertEqual(
["Update Account Dependencies After Insert Accounts"],
list(task.after_steps["Insert Accounts"].keys()),
)
lookups = {}
lookups["Id"] = MappingLookup(name="Id", table="accounts", key_field="sf_id")
lookups["ParentId"] = MappingLookup(table="accounts", name="ParentId")
self.assertEqual(
MappingStep(
sf_object="Account",
api=DataApi.BULK,
action=DataOperationType.UPDATE,
table="accounts",
fields={},
lookups=lookups,
),
task.after_steps["Insert Accounts"][
"Update Account Dependencies After Insert Accounts"
],
)
def test_stream_queried_data__skips_empty_rows(self):
task = _make_task(
LoadData, {"options": {"database_url": "sqlite://", "mapping": "test.yml"}}
)
task.sf = mock.Mock()
mapping = MappingStep(
**{
"sf_object": "Account",
"action": "update",
"fields": {},
"lookups": {
"Id": MappingLookup(
**{"table": "accounts", "key_field": "account_id"}
),
"ParentId": MappingLookup(**{"table": "accounts"}),
},
}
)
task._query_db = mock.Mock()
task._query_db.return_value.yield_per = mock.Mock(
return_value=[
# Local Id, Loaded Id, Parent Id
["001000000001", "001000000005", "001000000007"],
["001000000002", "001000000006", "001000000008"],
["001000000003", "001000000009", None],
]
)
with tempfile.TemporaryFile("w+t") as local_ids:
records = list(
task._stream_queried_data(mapping, local_ids, task._query_db(mapping))
)
self.assertEqual(
[["001000000005", "001000000007"], ["001000000006", "001000000008"]],
records,
)
@responses.activate
def test_stream_queried_data__adjusts_relative_dates(self):
mock_describe_calls()
task = _make_task(
LoadData, {"options": {"database_url": "sqlite://", "mapping": "test.yml"}}
)
task.sf = mock.Mock()
mapping = MappingStep(
sf_object="Contact",
action="insert",
fields=["Birthdate"],
anchor_date="2020-07-01",
)
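        # With anchor_date set, stored dates are re-based against the run
        # date: a value 9 days after the anchor should load as today + 9 days.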
task._query_db = mock.Mock()
task._query_db.return_value.yield_per = mock.Mock(
return_value=[
                # Local Id, Birthdate
["001000000001", "2020-07-10"],
["001000000003", None],
]
)
local_ids = io.StringIO()
records = list(
task._stream_queried_data(mapping, local_ids, task._query_db(mapping))
)
self.assertEqual(
[[(date.today() + timedelta(days=9)).isoformat()], [None]],
records,
)
def test_get_statics(self):
task = _make_task(
LoadData, {"options": {"database_url": "sqlite://", "mapping": "test.yml"}}
)
task.sf = mock.Mock()
task.sf.query.return_value = {"records": [{"Id": "012000000000000"}]}
self.assertEqual(
["Technology", "012000000000000"],
task._get_statics(
MappingStep(
sf_object="Account",
fields={"Id": "sf_id", "Name": "Name"},
static={"Industry": "Technology"},
record_type="Organization",
)
),
)
def test_get_statics_record_type_not_matched(self):
task = _make_task(
LoadData, {"options": {"database_url": "sqlite://", "mapping": "test.yml"}}
)
task.sf = mock.Mock()
task.sf.query.return_value = {"records": []}
with self.assertRaises(BulkDataException) as e:
task._get_statics(
MappingStep(
sf_object="Account",
action="insert",
fields={"Id": "sf_id", "Name": "Name"},
static={"Industry": "Technology"},
record_type="Organization",
)
            )
assert "RecordType" in str(e.exception)
@mock.patch("cumulusci.tasks.bulkdata.load.aliased")
def test_query_db__joins_self_lookups(self, aliased):
task = _make_task(
LoadData, {"options": {"database_url": "sqlite://", "mapping": "test.yml"}}
)
model = mock.Mock()
task.models = {"accounts": model}
task.metadata = mock.Mock()
task.metadata.tables = {"accounts_sf_ids": mock.Mock()}
task.session = mock.Mock()
model.__table__ = mock.Mock()
model.__table__.primary_key.columns.keys.return_value = ["sf_id"]
columns = {"sf_id": mock.Mock(), "name": mock.Mock()}
model.__table__.columns = columns
mapping = MappingStep(
sf_object="Account",
table="accounts",
action=DataOperationType.UPDATE,
fields={"Id": "sf_id", "Name": "name"},
lookups={
"ParentId": MappingLookup(
table="accounts", key_field="parent_id", name="ParentId"
)
},
)
task._query_db(mapping)
# Validate that the column set is accurate
task.session.query.assert_called_once_with(
model.sf_id,
model.__table__.columns["name"],
aliased.return_value.columns.sf_id,
)
# Validate that we asked for an outer join on the self-lookup
aliased.assert_called_once_with(task.metadata.tables["accounts_sf_ids"])
task.session.query.return_value.outerjoin.assert_called_once_with(
aliased.return_value, False
)
@mock.patch("cumulusci.tasks.bulkdata.load.aliased")
def test_query_db__person_accounts_enabled__account_mapping(self, aliased):
task = _make_task(
LoadData, {"options": {"database_url": "sqlite://", "mapping": "test.yml"}}
)
model = mock.Mock()
task.models = {"accounts": model}
task.metadata = mock.Mock()
task.metadata.tables = {"accounts_sf_ids": mock.Mock()}
task.session = mock.Mock()
task._can_load_person_accounts = mock.Mock(return_value=True)
task._filter_out_person_account_records = mock.Mock()
model.__table__ = mock.Mock()
model.__table__.primary_key.columns.keys.return_value = ["sf_id"]
columns = {
"sf_id": mock.Mock(),
"name": mock.Mock(),
"IsPersonAccount": mock.Mock(),
}
model.__table__.columns = columns
mapping = MappingStep(
sf_object="Account",
table="accounts",
action=DataOperationType.UPDATE,
fields={"Id": "sf_id", "Name": "name"},
lookups={
"ParentId": MappingLookup(
table="accounts", key_field="parent_id", name="ParentId"
)
},
)
task._query_db(mapping)
# Validate that the column set is accurate
task.session.query.assert_called_once_with(
model.sf_id,
model.__table__.columns["name"],
aliased.return_value.columns.sf_id,
)
# Validate person account records were not filtered out
task._can_load_person_accounts.assert_not_called()
task._filter_out_person_account_records.assert_not_called()
@mock.patch("cumulusci.tasks.bulkdata.load.aliased")
def test_query_db__person_accounts_disabled__account_mapping(self, aliased):
task = _make_task(
LoadData, {"options": {"database_url": "sqlite://", "mapping": "test.yml"}}
)
model = mock.Mock()
task.models = {"accounts": model}
task.metadata = mock.Mock()
task.metadata.tables = {"accounts_sf_ids": mock.Mock()}
task.session = mock.Mock()
task._can_load_person_accounts = mock.Mock(return_value=False)
task._filter_out_person_account_records = mock.Mock()
model.__table__ = mock.Mock()
model.__table__.primary_key.columns.keys.return_value = ["sf_id"]
columns = {
"sf_id": mock.Mock(),
"name": mock.Mock(),
"IsPersonAccount": mock.Mock(),
}
model.__table__.columns = columns
mapping = MappingStep(
sf_object="Account",
table="accounts",
action=DataOperationType.UPDATE,
fields={"Id": "sf_id", "Name": "name"},
lookups={
"ParentId": MappingLookup(
table="accounts", key_field="parent_id", name="ParentId"
)
},
)
task._query_db(mapping)
# Validate that the column set is accurate
task.session.query.assert_called_once_with(
model.sf_id,
model.__table__.columns["name"],
aliased.return_value.columns.sf_id,
)
# Validate person account records were not filtered out
task._can_load_person_accounts.assert_not_called()
task._filter_out_person_account_records.assert_not_called()
@mock.patch("cumulusci.tasks.bulkdata.load.aliased")
def test_query_db__person_accounts_enabled__contact_mapping(self, aliased):
task = _make_task(
LoadData, {"options": {"database_url": "sqlite://", "mapping": "test.yml"}}
)
model = mock.Mock()
task.models = {"contacts": model}
task.metadata = mock.Mock()
task.metadata.tables = {
"contacts_sf_ids": mock.Mock(),
"accounts_sf_ids": mock.Mock(),
}
task.session = mock.Mock()
task._can_load_person_accounts = mock.Mock(return_value=True)
task._filter_out_person_account_records = mock.Mock()
# Make mock query chainable
task.session.query.return_value = task.session.query
task.session.query.filter.return_value = task.session.query
task.session.query.outerjoin.return_value = task.session.query
task.session.query.order_by.return_value = task.session.query
model.__table__ = mock.Mock()
model.__table__.primary_key.columns.keys.return_value = ["sf_id"]
columns = {
"sf_id": mock.Mock(),
"name": mock.Mock(),
"IsPersonAccount": mock.Mock(),
}
model.__table__.columns = columns
mapping = MappingStep(
sf_object="Contact",
table="contacts",
action=DataOperationType.UPDATE,
fields={"Id": "sf_id", "Name": "name"},
lookups={
"ParentId": MappingLookup(
table="accounts", key_field="parent_id", name="ParentId"
)
},
)
task._query_db(mapping)
# Validate that the column set is accurate
task.session.query.assert_called_once_with(
model.sf_id,
model.__table__.columns["name"],
aliased.return_value.columns.sf_id,
)
# Validate person contact records were not filtered out
task._can_load_person_accounts.assert_called_once_with(mapping)
task._filter_out_person_account_records.assert_called_once_with(
task.session.query.return_value, model
)
@mock.patch("cumulusci.tasks.bulkdata.load.aliased")
def test_query_db__person_accounts_disabled__contact_mapping(self, aliased):
task = _make_task(
LoadData, {"options": {"database_url": "sqlite://", "mapping": "test.yml"}}
)
model = mock.Mock()
task.models = {"contacts": model}
task.metadata = mock.Mock()
task.metadata.tables = {
"contacts_sf_ids": mock.Mock(),
"accounts_sf_ids": mock.Mock(),
}
task.session = mock.Mock()
task._can_load_person_accounts = mock.Mock(return_value=False)
task._filter_out_person_account_records = mock.Mock()
# Make mock query chainable
task.session.query.return_value = task.session.query
task.session.query.filter.return_value = task.session.query
task.session.query.outerjoin.return_value = task.session.query
task.session.query.order_by.return_value = task.session.query
model.__table__ = mock.Mock()
model.__table__.primary_key.columns.keys.return_value = ["sf_id"]
columns = {
"sf_id": mock.Mock(),
"name": mock.Mock(),
"IsPersonAccount": mock.Mock(),
}
model.__table__.columns = columns
mapping = MappingStep(
sf_object="Contact",
table="contacts",
action=DataOperationType.UPDATE,
fields={"Id": "sf_id", "Name": "name"},
lookups={
"ParentId": MappingLookup(
table="accounts", key_field="parent_id", name="ParentId"
)
},
)
task._query_db(mapping)
# Validate that the column set is accurate
task.session.query.assert_called_once_with(
model.sf_id,
model.__table__.columns["name"],
aliased.return_value.columns.sf_id,
)
# Validate person contact records were not filtered out
task._can_load_person_accounts.assert_called_once_with(mapping)
task._filter_out_person_account_records.assert_not_called()
@mock.patch("cumulusci.tasks.bulkdata.load.aliased")
def test_query_db__person_accounts_enabled__neither_account_nor_contact_mapping(
self, aliased
):
task = _make_task(
LoadData, {"options": {"database_url": "sqlite://", "mapping": "test.yml"}}
)
model = mock.Mock()
task.models = {"requests": model}
task.metadata = mock.Mock()
task.metadata.tables = {
"requests_sf_ids": mock.Mock(),
"accounts_sf_ids": mock.Mock(),
}
task.session = mock.Mock()
task._can_load_person_accounts = mock.Mock(return_value=True)
task._filter_out_person_account_records = mock.Mock()
# Make mock query chainable
task.session.query.return_value = task.session.query
task.session.query.filter.return_value = task.session.query
task.session.query.outerjoin.return_value = task.session.query
task.session.query.order_by.return_value = task.session.query
model.__table__ = mock.Mock()
model.__table__.primary_key.columns.keys.return_value = ["sf_id"]
columns = {"sf_id": mock.Mock(), "name": mock.Mock()}
model.__table__.columns = columns
mapping = MappingStep(
sf_object="Request__c",
table="requests",
action=DataOperationType.UPDATE,
fields={"Id": "sf_id", "Name": "name"},
lookups={
"ParentId": MappingLookup(
table="accounts", key_field="parent_id", name="ParentId"
)
},
)
task._query_db(mapping)
# Validate that the column set is accurate
task.session.query.assert_called_once_with(
model.sf_id,
model.__table__.columns["name"],
aliased.return_value.columns.sf_id,
)
# Validate person contact db records had their Name updated as blank
task._can_load_person_accounts.assert_not_called()
# Validate person contact records were not filtered out
task._filter_out_person_account_records.assert_not_called()
def test_initialize_id_table__already_exists(self):
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
task.mapping = {}
with task._init_db():
id_table = Table(
"test_sf_ids",
task.metadata,
Column("id", Unicode(255), primary_key=True),
)
id_table.create()
task._initialize_id_table({"table": "test"}, True)
new_id_table = task.metadata.tables["test_sf_ids"]
self.assertFalse(new_id_table is id_table)
def test_initialize_id_table__already_exists_and_should_not_reset_table(self):
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
task.mapping = {}
with task._init_db():
id_table = Table(
"test_sf_ids",
task.metadata,
Column("id", Unicode(255), primary_key=True),
)
id_table.create()
table_name = task._initialize_id_table({"table": "test"}, False)
assert table_name == "test_sf_ids"
new_id_table = task.metadata.tables["test_sf_ids"]
assert new_id_table is id_table
def test_run_task__exception_failure(self):
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
task._init_db = mock.Mock(return_value=nullcontext())
task._init_mapping = mock.Mock()
task._execute_step = mock.Mock(
return_value=DataOperationJobResult(
DataOperationStatus.JOB_FAILURE, [], 0, 0
)
)
task.mapping = {"Test": MappingStep(sf_object="Account")}
with self.assertRaises(BulkDataException):
task()
def test_process_job_results__insert_success(self):
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
task.session = mock.Mock()
task._initialize_id_table = mock.Mock()
task._sql_bulk_insert_from_records = mock.Mock()
task.bulk = mock.Mock()
task.sf = mock.Mock()
local_ids = ["1"]
step = FakeBulkAPIDmlOperation(
sobject="Contact",
operation=DataOperationType.INSERT,
api_options={},
context=task,
fields=[],
)
step.results = [DataOperationResult("001111111111111", True, None)]
mapping = MappingStep(sf_object="Account")
task._process_job_results(mapping, step, local_ids)
task.session.connection.assert_called_once()
task._initialize_id_table.assert_called_once_with(mapping, True)
task._sql_bulk_insert_from_records.assert_called_once()
task.session.commit.assert_called_once()
def test_process_job_results__insert_rows_fail(self):
task = _make_task(
LoadData,
{
"options": {
"database_url": "sqlite://",
"mapping": "mapping.yml",
"ignore_row_errors": True,
}
},
)
task.session = mock.Mock()
task._initialize_id_table = mock.Mock()
task._sql_bulk_insert_from_records = mock.Mock()
task.bulk = mock.Mock()
task.sf = mock.Mock()
task.logger = mock.Mock()
local_ids = ["1", "2", "3", "4"]
step = FakeBulkAPIDmlOperation(
sobject="Contact",
operation=DataOperationType.INSERT,
api_options={},
context=task,
fields=[],
)
step.job_result = DataOperationJobResult(
DataOperationStatus.ROW_FAILURE, [], 4, 4
)
step.end = mock.Mock()
step.results = [
DataOperationResult("001111111111111", False, None),
DataOperationResult("001111111111112", False, None),
DataOperationResult("001111111111113", False, None),
DataOperationResult("001111111111114", False, None),
]
mapping = MappingStep(sf_object="Account", table="Account")
task._process_job_results(mapping, step, local_ids)
task.session.connection.assert_called_once()
task._initialize_id_table.assert_called_once_with(mapping, True)
task._sql_bulk_insert_from_records.assert_not_called()
task.session.commit.assert_called_once()
assert len(task.logger.mock_calls) == 4
def test_process_job_results__update_success(self):
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
task.session = mock.Mock()
task._initialize_id_table = mock.Mock()
task._sql_bulk_insert_from_records = mock.Mock()
task.bulk = mock.Mock()
task.sf = mock.Mock()
local_ids = ["1"]
step = FakeBulkAPIDmlOperation(
sobject="Contact",
operation=DataOperationType.INSERT,
api_options={},
context=task,
fields=[],
)
step.results = [DataOperationResult("001111111111111", True, None)]
mapping = MappingStep(sf_object="Account", action=DataOperationType.UPDATE)
task._process_job_results(mapping, step, local_ids)
task.session.connection.assert_not_called()
task._initialize_id_table.assert_not_called()
task._sql_bulk_insert_from_records.assert_not_called()
task.session.commit.assert_not_called()
def test_process_job_results__exception_failure(self):
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
task.session = mock.Mock()
task._initialize_id_table = mock.Mock()
task._sql_bulk_insert_from_records = mock.Mock()
task.bulk = mock.Mock()
task.sf = mock.Mock()
local_ids = ["1"]
step = FakeBulkAPIDmlOperation(
sobject="Contact",
operation=DataOperationType.UPDATE,
api_options={},
context=task,
fields=[],
)
step.results = [DataOperationResult(None, False, "message")]
step.end()
mapping = MappingStep(sf_object="Account", action=DataOperationType.UPDATE)
with self.assertRaises(BulkDataException) as ex:
task._process_job_results(mapping, step, local_ids)
self.assertIn("Error on record with id", str(ex.exception))
self.assertIn("message", str(ex.exception))
def test_process_job_results__person_account_contact_ids__not_updated__mapping_action_not_insert(
self,
):
"""
Contact ID table is updated with Contact IDs for person account records
only if all:
❌ mapping's action is "insert"
✅ mapping's sf_object is Contact
✅ person accounts is enabled
✅ an account_id_lookup is found in the mapping
"""
# ❌ mapping's action is "insert"
action = DataOperationType.UPDATE
# ✅ mapping's sf_object is Contact
sf_object = "Contact"
# ✅ person accounts is enabled
can_load_person_accounts = True
# ✅ an account_id_lookup is found in the mapping
account_id_lookup = mock.Mock()
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
task.session = mock.Mock()
task._initialize_id_table = mock.Mock()
task._sql_bulk_insert_from_records = mock.Mock()
task.bulk = mock.Mock()
task.sf = mock.Mock()
task._can_load_person_accounts = mock.Mock(
return_value=can_load_person_accounts
)
task._generate_contact_id_map_for_person_accounts = mock.Mock()
local_ids = ["1"]
step = FakeBulkAPIDmlOperation(
sobject="Contact",
operation=DataOperationType.INSERT,
api_options={},
context=task,
fields=[],
)
step.results = [DataOperationResult("001111111111111", True, None)]
mapping = MappingStep(
sf_object=sf_object,
table="Account",
action=action,
lookups={},
)
if account_id_lookup:
mapping.lookups["AccountId"] = account_id_lookup
task._process_job_results(mapping, step, local_ids)
task._generate_contact_id_map_for_person_accounts.assert_not_called()
def test_process_job_results__person_account_contact_ids__not_updated__sf_object_not_contact(
self,
):
"""
Contact ID table is updated with Contact IDs for person account records
only if all:
✅ mapping's action is "insert"
❌ mapping's sf_object is Contact
✅ person accounts is enabled
✅ an account_id_lookup is found in the mapping
"""
# ✅ mapping's action is "insert"
action = DataOperationType.INSERT
# ❌ mapping's sf_object is Contact
sf_object = "Opportunity"
# ✅ person accounts is enabled
can_load_person_accounts = True
# ✅ an account_id_lookup is found in the mapping
account_id_lookup = mock.Mock()
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
task.session = mock.Mock()
task._initialize_id_table = mock.Mock()
task._sql_bulk_insert_from_records = mock.Mock()
task.bulk = mock.Mock()
task.sf = mock.Mock()
task._can_load_person_accounts = mock.Mock(
return_value=can_load_person_accounts
)
task._generate_contact_id_map_for_person_accounts = mock.Mock()
local_ids = ["1"]
step = FakeBulkAPIDmlOperation(
sobject="Contact",
operation=DataOperationType.INSERT,
api_options={},
context=task,
fields=[],
)
step.results = [DataOperationResult("001111111111111", True, None)]
mapping = MappingStep(
sf_object=sf_object,
table="Account",
action=action,
lookups={},
)
if account_id_lookup:
mapping.lookups["AccountId"] = account_id_lookup
task._process_job_results(mapping, step, local_ids)
task._generate_contact_id_map_for_person_accounts.assert_not_called()
def test_process_job_results__person_account_contact_ids__not_updated__person_accounts_not_enabled(
self,
):
"""
Contact ID table is updated with Contact IDs for person account records
only if all:
✅ mapping's action is "insert"
✅ mapping's sf_object is Contact
❌ person accounts is enabled
✅ an account_id_lookup is found in the mapping
"""
# ✅ mapping's action is "insert"
action = DataOperationType.INSERT
# ✅ mapping's sf_object is Contact
sf_object = "Contact"
# ❌ person accounts is enabled
can_load_person_accounts = False
# ✅ an account_id_lookup is found in the mapping
account_id_lookup = mock.Mock()
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
task.session = mock.Mock()
task._initialize_id_table = mock.Mock()
task._sql_bulk_insert_from_records = mock.Mock()
task.bulk = mock.Mock()
task.sf = mock.Mock()
task._can_load_person_accounts = mock.Mock(
return_value=can_load_person_accounts
)
task._generate_contact_id_map_for_person_accounts = mock.Mock()
local_ids = ["1"]
step = FakeBulkAPIDmlOperation(
sobject="Contact",
operation=DataOperationType.INSERT,
api_options={},
context=task,
fields=[],
)
step.results = [DataOperationResult("001111111111111", True, None)]
mapping = MappingStep(
sf_object=sf_object,
table="Account",
action=action,
lookups={},
)
if account_id_lookup:
mapping.lookups["AccountId"] = account_id_lookup
task._process_job_results(mapping, step, local_ids)
task._generate_contact_id_map_for_person_accounts.assert_not_called()
def test_process_job_results__person_account_contact_ids__not_updated__no_account_id_lookup(
self,
):
"""
Contact ID table is updated with Contact IDs for person account records
only if all:
✅ mapping's action is "insert"
✅ mapping's sf_object is Contact
✅ person accounts is enabled
❌ an account_id_lookup is found in the mapping
"""
# ✅ mapping's action is "insert"
action = DataOperationType.INSERT
# ✅ mapping's sf_object is Contact
sf_object = "Contact"
# ✅ person accounts is enabled
can_load_person_accounts = True
# ❌ an account_id_lookup is found in the mapping
account_id_lookup = None
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
task.session = mock.Mock()
task._initialize_id_table = mock.Mock()
task._sql_bulk_insert_from_records = mock.Mock()
task.bulk = mock.Mock()
task.sf = mock.Mock()
task._can_load_person_accounts = mock.Mock(
return_value=can_load_person_accounts
)
task._generate_contact_id_map_for_person_accounts = mock.Mock()
local_ids = ["1"]
step = FakeBulkAPIDmlOperation(
sobject="Contact",
operation=DataOperationType.INSERT,
api_options={},
context=task,
fields=[],
)
step.results = [DataOperationResult("001111111111111", True, None)]
mapping = MappingStep(
sf_object=sf_object,
table="Account",
action=action,
lookups={},
)
if account_id_lookup:
mapping.lookups["AccountId"] = account_id_lookup
task._process_job_results(mapping, step, local_ids)
task._generate_contact_id_map_for_person_accounts.assert_not_called()
def test_process_job_results__person_account_contact_ids__updated(self):
"""
Contact ID table is updated with Contact IDs for person account records
only if all:
✅ mapping's action is "insert"
✅ mapping's sf_object is Contact
✅ person accounts is enabled
✅ an account_id_lookup is found in the mapping
"""
# ✅ mapping's action is "insert"
action = DataOperationType.INSERT
# ✅ mapping's sf_object is Contact
sf_object = "Contact"
# ✅ person accounts is enabled
can_load_person_accounts = True
# ✅ an account_id_lookup is found in the mapping
account_id_lookup = MappingLookup(table="accounts")
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
task.session = mock.Mock()
task._initialize_id_table = mock.Mock()
task._sql_bulk_insert_from_records = mock.Mock()
task.bulk = mock.Mock()
task.sf = mock.Mock()
task._can_load_person_accounts = mock.Mock(
return_value=can_load_person_accounts
)
task._generate_contact_id_map_for_person_accounts = mock.Mock()
local_ids = ["1"]
step = FakeBulkAPIDmlOperation(
sobject="Contact",
operation=DataOperationType.INSERT,
api_options={},
context=task,
fields=[],
)
step.results = [DataOperationResult("001111111111111", True, None)]
mapping = MappingStep(
sf_object=sf_object,
table="Account",
action=action,
lookups={"AccountId": account_id_lookup},
)
task._process_job_results(mapping, step, local_ids)
task._generate_contact_id_map_for_person_accounts.assert_called_once_with(
mapping, mapping.lookups["AccountId"], task.session.connection.return_value
)
task._sql_bulk_insert_from_records.assert_called_with(
connection=task.session.connection.return_value,
table=task._initialize_id_table.return_value,
columns=("id", "sf_id"),
record_iterable=task._generate_contact_id_map_for_person_accounts.return_value,
)
def test_generate_results_id_map__success(self):
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
step = mock.Mock()
step.get_results.return_value = iter(
[
DataOperationResult("001000000000000", True, None),
DataOperationResult("001000000000001", True, None),
DataOperationResult("001000000000002", True, None),
]
)
generator = task._generate_results_id_map(
step, ["001000000000009", "001000000000010", "001000000000011"]
)
assert list(generator) == [
("001000000000009", "001000000000000"),
("001000000000010", "001000000000001"),
("001000000000011", "001000000000002"),
]
def test_generate_results_id_map__exception_failure(self):
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
step = mock.Mock()
step.get_results.return_value = iter(
[
DataOperationResult("001000000000000", True, None),
DataOperationResult(None, False, "error"),
DataOperationResult("001000000000002", True, None),
]
)
with self.assertRaises(BulkDataException) as ex:
list(
task._generate_results_id_map(
step, ["001000000000009", "001000000000010", "001000000000011"]
)
)
self.assertIn("Error on record", str(ex.exception))
self.assertIn("001000000000010", str(ex.exception))
def test_generate_results_id_map__respects_silent_error_flag(self):
task = _make_task(
LoadData,
{
"options": {
"ignore_row_errors": True,
"database_url": "sqlite://",
"mapping": "mapping.yml",
}
},
)
step = mock.Mock()
step.get_results.return_value = iter(
[DataOperationResult(None, False, None)] * 15
)
with mock.patch.object(task.logger, "warning") as warning:
generator = task._generate_results_id_map(
step, ["001000000000009", "001000000000010", "001000000000011"] * 15
)
_ = list(generator) # generate the errors
assert len(warning.mock_calls) == task.row_warning_limit + 1 == 11
assert "warnings suppressed" in str(warning.mock_calls[-1])
step = mock.Mock()
step.get_results.return_value = iter(
[
DataOperationResult("001000000000000", True, None),
DataOperationResult(None, False, None),
DataOperationResult("001000000000002", True, None),
]
)
generator = task._generate_results_id_map(
step, ["001000000000009", "001000000000010", "001000000000011"]
)
assert list(generator) == [
("001000000000009", "001000000000000"),
("001000000000011", "001000000000002"),
]
@mock.patch("cumulusci.tasks.bulkdata.load.get_dml_operation")
def test_execute_step__record_type_mapping(self, dml_mock):
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
task.session = mock.Mock()
task._load_record_types = mock.Mock()
task._process_job_results = mock.Mock()
task._query_db = mock.Mock()
task._execute_step(
MappingStep(
**{
"sf_object": "Account",
"action": "insert",
"fields": {"Name": "Name"},
}
)
)
task._load_record_types.assert_not_called()
task._execute_step(
MappingStep(
**{
"sf_object": "Account",
"action": "insert",
"fields": {"Name": "Name", "RecordTypeId": "RecordTypeId"},
}
)
)
task._load_record_types.assert_called_once_with(
["Account"], task.session.connection.return_value
)
def test_query_db__record_type_mapping(self):
task = _make_task(
LoadData, {"options": {"database_url": "sqlite://", "mapping": "test.yml"}}
)
model = mock.Mock()
task.models = {"accounts": model}
task.metadata = mock.Mock()
task.metadata.tables = {
"Account_rt_target_mapping": mock.Mock(),
"Account_rt_mapping": mock.Mock(),
}
task.session = mock.Mock()
model.__table__ = mock.Mock()
model.__table__.primary_key.columns.keys.return_value = ["sf_id"]
columns = {"sf_id": mock.Mock(), "name": mock.Mock()}
model.__table__.columns = columns
mapping = MappingStep(
sf_object="Account",
table="accounts",
fields={"Id": "sf_id", "Name": "name", "RecordTypeId": "RecordTypeId"},
)
task._query_db(mapping)
# Validate that the column set is accurate
task.session.query.assert_called_once_with(
model.sf_id,
model.__table__.columns["name"],
task.metadata.tables["Account_rt_target_mapping"].columns.record_type_id,
)
# Validate that we asked for the right joins on the record type tables
task.session.query.return_value.outerjoin.assert_called_once_with(
task.metadata.tables["Account_rt_mapping"], False
)
task.session.query.return_value.outerjoin.return_value.outerjoin.assert_called_once_with(
task.metadata.tables["Account_rt_target_mapping"], False
)
@mock.patch("cumulusci.tasks.bulkdata.load.automap_base")
@responses.activate
def test_init_db__record_type_mapping(self, base):
base_path = os.path.dirname(__file__)
mapping_path = os.path.join(base_path, self.mapping_file)
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": mapping_path}},
)
def create_table_mock(table_name):
task.models[table_name] = mock.Mock()
task._create_record_type_table = mock.Mock(side_effect=create_table_mock)
task.models = mock.Mock()
task.metadata = mock.Mock()
task._validate_org_has_person_accounts_enabled_if_person_account_data_exists = (
mock.Mock()
)
mock_describe_calls()
task._init_mapping()
task.mapping["Insert Households"]["fields"]["RecordTypeId"] = "RecordTypeId"
with task._init_db():
task._create_record_type_table.assert_called_once_with(
"Account_rt_target_mapping"
)
task._validate_org_has_person_accounts_enabled_if_person_account_data_exists.assert_called_once_with()
def test_load_record_types(self):
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": "mapping.yml"}},
)
conn = mock.Mock()
task._extract_record_types = mock.Mock()
task._load_record_types(["Account", "Contact"], conn)
task._extract_record_types.assert_has_calls(
[
unittest.mock.call("Account", "Account_rt_target_mapping", conn),
unittest.mock.call("Contact", "Contact_rt_target_mapping", conn),
]
)
@responses.activate
@mock.patch("cumulusci.tasks.bulkdata.load.get_dml_operation")
def test_run__autopk(self, dml_mock):
responses.add(
method="GET",
url="https://example.com/services/data/v46.0/query/?q=SELECT+Id+FROM+RecordType+WHERE+SObjectType%3D%27Account%27AND+DeveloperName+%3D+%27HH_Account%27+LIMIT+1",
body=json.dumps({"records": [{"Id": "1"}]}),
status=200,
)
mapping_file = "mapping_v2.yml"
base_path = os.path.dirname(__file__)
db_path = os.path.join(base_path, "testdata.db")
mapping_path = os.path.join(base_path, mapping_file)
with temporary_dir() as d:
tmp_db_path = os.path.join(d, "testdata.db")
shutil.copyfile(db_path, tmp_db_path)
task = _make_task(
LoadData,
{
"options": {
"database_url": f"sqlite:///{tmp_db_path}",
"mapping": mapping_path,
}
},
)
task.bulk = mock.Mock()
task.sf = mock.Mock()
step = FakeBulkAPIDmlOperation(
sobject="Contact",
operation=DataOperationType.INSERT,
api_options={},
context=task,
fields=[],
)
dml_mock.return_value = step
step.results = [
DataOperationResult("001000000000000", True, None),
DataOperationResult("003000000000000", True, None),
DataOperationResult("003000000000001", True, None),
]
mock_describe_calls()
task()
assert step.records == [
["TestHousehold", "1"],
["Test", "User", "test@example.com", "001000000000000"],
["Error", "User", "error@example.com", "001000000000000"],
]
with create_engine(task.options["database_url"]).connect() as c:
hh_ids = next(c.execute("SELECT * from households_sf_ids"))
assert hh_ids == ("1", "001000000000000")
@responses.activate
def test_run__complex_lookups(self):
mapping_file = "mapping-oid.yml"
base_path = os.path.dirname(__file__)
mapping_path = os.path.join(base_path, mapping_file)
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": mapping_path}},
)
mock_describe_calls()
task._init_mapping()
assert (
task.mapping["Insert Accounts"]["lookups"]["ParentId"]["after"]
== "Insert Accounts"
)
task.models = {}
task.models["accounts"] = mock.MagicMock()
task.models["accounts"].__table__ = mock.MagicMock()
task.models["accounts"].__table__.primary_key.columns = mock.MagicMock()
task.models["accounts"].__table__.primary_key.columns.keys = mock.Mock(
return_value=["Id"]
)
task._expand_mapping()
assert (
task.mapping["Insert Accounts"]["lookups"]["ParentId"]["after"]
== "Insert Accounts"
)
@responses.activate
def test_load__inferred_keyfield_camelcase(self):
mapping_file = "mapping-oid.yml"
base_path = os.path.dirname(__file__)
mapping_path = os.path.join(base_path, mapping_file)
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": mapping_path}},
)
mock_describe_calls()
task._init_mapping()
class FakeModel:
ParentId = mock.MagicMock()
assert (
task.mapping["Insert Accounts"]["lookups"]["ParentId"].get_lookup_key_field(
FakeModel()
)
== "ParentId"
)
@responses.activate
def test_load__inferred_keyfield_snakecase(self):
mapping_file = "mapping-oid.yml"
base_path = os.path.dirname(__file__)
mapping_path = os.path.join(base_path, mapping_file)
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": mapping_path}},
)
mock_describe_calls()
task._init_mapping()
class FakeModel:
parent_id = mock.MagicMock()
assert (
task.mapping["Insert Accounts"]["lookups"]["ParentId"].get_lookup_key_field(
FakeModel()
)
== "parent_id"
)
def test_validate_org_has_person_accounts_enabled_if_person_account_data_exists__raises_exception__account(
self,
):
"""
A BulkDataException is raised because the task will (later) attempt to load
person account Account records, but the org does not have person accounts enabled
which will result in an Exception from the Bulk Data API or load records in
an unexpected state.
- ✅ An Account or Contact object is mapped
- ✅ The corresponding table includes an IsPersonAccount column
- ✅ There is at least one record in the table with IsPersonAccount equals "true"
- ✅ The org does not have person accounts enabled
"""
mapping_file = "mapping-oid.yml"
base_path = os.path.dirname(__file__)
mapping_path = os.path.join(base_path, mapping_file)
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": mapping_path}},
)
# ✅ An Account object is mapped
mapping = MappingStep(sf_object="Account", table="account")
model = mock.Mock()
model.__table__ = mock.Mock()
task.mapping = {"Mapping Step": mapping}
task.models = {mapping["table"]: model}
        # ✅ The corresponding table includes an IsPersonAccount column
task._db_has_person_accounts_column = mock.Mock(return_value=True)
# ✅ There is at least one record in the table with IsPersonAccount equals "true"
task.session = mock.Mock()
task.session.query.return_value = task.session.query
task.session.query.filter.return_value = task.session.query
assert task.session.query.first.return_value is not None
# ✅ The org does not have person accounts enabled
task.org_config._is_person_accounts_enabled = False
with self.assertRaises(BulkDataException):
task._validate_org_has_person_accounts_enabled_if_person_account_data_exists()
def test_validate_org_has_person_accounts_enabled_if_person_account_data_exists__raises_exception__contact(
self,
):
"""
A BulkDataException is raised because the task will (later) attempt to load
person account Account records, but the org does not have person accounts enabled
which will result in an Exception from the Bulk Data API or load records in
an unexpected state.
- ✅ An Account or Contact object is mapped
- ✅ The corresponding table includes an IsPersonAccount column
- ✅ There is at least one record in the table with IsPersonAccount equals "true"
- ✅ The org does not have person accounts enabled
"""
mapping_file = "mapping-oid.yml"
base_path = os.path.dirname(__file__)
mapping_path = os.path.join(base_path, mapping_file)
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": mapping_path}},
)
# ✅ A Contact object is mapped
mapping = MappingStep(sf_object="Contact", table="contact")
model = mock.Mock()
model.__table__ = mock.Mock()
task.mapping = {"Mapping Step": mapping}
task.models = {mapping["table"]: model}
        # ✅ The corresponding table includes an IsPersonAccount column
task._db_has_person_accounts_column = mock.Mock(return_value=True)
# ✅ There is at least one record in the table with IsPersonAccount equals "true"
task.session = mock.Mock()
task.session.query.return_value = task.session.query
task.session.query.filter.return_value = task.session.query
assert task.session.query.first.return_value is not None
# ✅ The org does not have person accounts enabled
task.org_config._is_person_accounts_enabled = False
with self.assertRaises(BulkDataException):
task._validate_org_has_person_accounts_enabled_if_person_account_data_exists()
def test_validate_org_has_person_accounts_enabled_if_person_account_data_exists__success_if_org_has_person_accounts_enabled(
self,
):
"""
        No BulkDataException is raised: person account data exists, but the org
        has person accounts enabled, so the load can proceed.
- ✅ An Account or Contact object is mapped
- ✅ The corresponding table includes an IsPersonAccount column
- ✅ There is at least one record in the table with IsPersonAccount equals "true"
- ❌ The org does not have person accounts enabled
"""
mapping_file = "mapping-oid.yml"
base_path = os.path.dirname(__file__)
mapping_path = os.path.join(base_path, mapping_file)
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": mapping_path}},
)
# ✅ An Account object is mapped
mapping = MappingStep(table="account", sf_object="Account")
model = mock.Mock()
model.__table__ = mock.Mock()
task.mapping = {"Mapping Step": mapping}
task.models = {mapping["table"]: model}
        # ✅ The corresponding table includes an IsPersonAccount column
task._db_has_person_accounts_column = mock.Mock(return_value=True)
# ✅ There is at least one record in the table with IsPersonAccount equals "true"
task.session = mock.Mock()
task.session.query.return_value = task.session.query
task.session.query.filter.return_value = task.session.query
assert task.session.query.first.return_value is not None
        # ❌ The org does have person accounts enabled
task.org_config._is_person_accounts_enabled = True
task._validate_org_has_person_accounts_enabled_if_person_account_data_exists()
def test_validate_org_has_person_accounts_enabled_if_person_account_data_exists__success_if_no_person_account_records(
self,
):
"""
        No BulkDataException is raised: the table contains no records with
        IsPersonAccount equal to "true", so there is no person account data to load.
- ✅ An Account or Contact object is mapped
- ✅ The corresponding table includes an IsPersonAccount column
- ❌ There is at least one record in the table with IsPersonAccount equals "true"
- ✅ The org does not have person accounts enabled
"""
mapping_file = "mapping-oid.yml"
base_path = os.path.dirname(__file__)
mapping_path = os.path.join(base_path, mapping_file)
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": mapping_path}},
)
# ✅ An Account object is mapped
mapping = MappingStep(sf_object="Account", table="account")
model = mock.Mock()
model.__table__ = mock.Mock()
task.mapping = {"Mapping Step": mapping}
task.models = {mapping["table"]: model}
        # ✅ The corresponding table includes an IsPersonAccount column
task._db_has_person_accounts_column = mock.Mock(return_value=True)
# ❌ There is at least one record in the table with IsPersonAccount equals "true"
task.session = mock.Mock()
task.session.query.return_value = task.session.query
task.session.query.filter.return_value = task.session.query
task.session.query.first.return_value = None
assert task.session.query.first.return_value is None
        # ✅ The org does have person accounts enabled
task.org_config._is_person_accounts_enabled = True
task._validate_org_has_person_accounts_enabled_if_person_account_data_exists()
def test_validate_org_has_person_accounts_enabled_if_person_account_data_exists__success_if_no_person_account_column(
self,
):
"""
        No BulkDataException is raised: the table has no IsPersonAccount column,
        so it cannot contain person account data.
- ✅ An Account or Contact object is mapped
- ❌ The corresponding table includes an IsPersonAccount column
- ✅ There is at least one record in the table with IsPersonAccount equals "true"
- ✅ The org does not have person accounts enabled
"""
mapping_file = "mapping-oid.yml"
base_path = os.path.dirname(__file__)
mapping_path = os.path.join(base_path, mapping_file)
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": mapping_path}},
)
# ✅ An Account object is mapped
mapping = MappingStep(sf_object="Account", table="account")
model = mock.Mock()
model.__table__ = mock.Mock()
task.mapping = {"Mapping Step": mapping}
task.models = {mapping["table"]: model}
        # ❌ The corresponding table includes an IsPersonAccount column
task._db_has_person_accounts_column = mock.Mock(return_value=False)
# ✅ There is at least one record in the table with IsPersonAccount equals "true"
task.session = mock.Mock()
task.session.query.return_value = task.session.query
task.session.query.filter.return_value = task.session.query
assert task.session.query.first.return_value is not None
        # ✅ The org does have person accounts enabled
task.org_config._is_person_accounts_enabled = True
task._validate_org_has_person_accounts_enabled_if_person_account_data_exists()
def test_validate_org_has_person_accounts_enabled_if_person_account_data_exists__success_if_no_account_or_contact_not_mapped(
self,
):
"""
        No BulkDataException is raised: neither an Account nor a Contact object
        is mapped, so no person account records will be loaded.
- ❌ An Account or Contact object is mapped
- ✅ The corresponding table includes an IsPersonAccount column
- ✅ There is at least one record in the table with IsPersonAccount equals "true"
- ✅ The org does not have person accounts enabled
"""
mapping_file = "mapping-oid.yml"
base_path = os.path.dirname(__file__)
mapping_path = os.path.join(base_path, mapping_file)
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": mapping_path}},
)
# ❌ An Account object is mapped
mapping = MappingStep(sf_object="CustomObject__c", table="custom_object")
model = mock.Mock()
model.__table__ = mock.Mock()
task.mapping = {"Mapping Step": mapping}
task.models = {mapping["table"]: model}
        # ✅ The corresponding table includes an IsPersonAccount column
task._db_has_person_accounts_column = mock.Mock(return_value=True)
# ✅ There is at least one record in the table with IsPersonAccount equals "true"
task.session = mock.Mock()
task.session.query.return_value = task.session.query
task.session.query.filter.return_value = task.session.query
assert task.session.query.first.return_value is not None
        # ✅ The org does have person accounts enabled
task.org_config._is_person_accounts_enabled = True
task._validate_org_has_person_accounts_enabled_if_person_account_data_exists()
def test_db_has_person_accounts_column(self):
mapping_file = "mapping-oid.yml"
base_path = os.path.dirname(__file__)
mapping_path = os.path.join(base_path, mapping_file)
for columns, expected in [
({}, False),
({"IsPersonAccount": None}, False),
({"IsPersonAccount": "Not None"}, True),
]:
mapping = MappingStep(sf_object="Account")
model = mock.Mock()
model.__table__ = mock.Mock()
model.__table__.columns = columns
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": mapping_path}},
)
task.models = {}
task.models[mapping.table] = model
actual = task._db_has_person_accounts_column(mapping)
self.assertEqual(expected, actual, f"columns: {columns}")
@mock.patch("cumulusci.tasks.bulkdata.load.func.lower")
def test_filter_out_person_account_records(self, lower):
task = _make_task(
LoadData, {"options": {"database_url": "sqlite://", "mapping": "test.yml"}}
)
model = mock.Mock()
model.__table__ = mock.Mock()
IsPersonAccount_column = mock.MagicMock()
lower.return_value.__eq__ = mock.Mock()
columns = {
"sf_id": mock.Mock(),
"name": mock.Mock(),
"IsPersonAccount": IsPersonAccount_column,
}
model.__table__.columns = columns
query = mock.Mock()
expected = query.filter.return_value
actual = task._filter_out_person_account_records(query, model)
self.assertEqual(expected, actual)
lower.return_value.__eq__.assert_called_once_with("false")
query.filter.assert_called_once_with(lower.return_value.__eq__.return_value)
def test_generate_contact_id_map_for_person_accounts(self):
mapping_file = "mapping-oid.yml"
base_path = os.path.dirname(__file__)
mapping_path = os.path.join(base_path, mapping_file)
# Set task mocks
task = _make_task(
LoadData,
{"options": {"database_url": "sqlite://", "mapping": mapping_path}},
)
account_model = mock.Mock()
contact_model = mock.Mock()
task.models = {"accounts": account_model, "contacts": contact_model}
task.metadata = mock.Mock()
task.metadata.tables = {
"accounts": mock.Mock(),
"contacts": mock.Mock(),
"accounts_sf_ids": mock.Mock(),
"contacts_sf_ids": mock.Mock(),
}
task.session = mock.Mock()
task.session.query.return_value = task.session.query
task.session.query.filter.return_value = task.session.query
task.session.query.outerjoin.return_value = task.session.query
task.sf = mock.Mock()
# Set model mocks
account_model.__table__ = mock.Mock()
account_model.__table__.primary_key.columns.keys.return_value = ["sf_id"]
account_model.__table__.columns = {
"id": mock.Mock(),
"sf_id": mock.Mock(),
"IsPersonAccount": mock.MagicMock(),
}
account_sf_ids_table = mock.Mock()
account_sf_ids_table.columns = {"id": mock.Mock(), "sf_id": mock.Mock()}
contact_model.__table__ = mock.Mock()
contact_model.__table__.primary_key.columns.keys.return_value = ["sf_id"]
contact_model.__table__.columns = {
"id": mock.Mock(),
"sf_id": mock.Mock(),
"IsPersonAccount": mock.MagicMock(),
"account_id": mock.Mock,
}
account_id_lookup = MappingLookup(
table="accounts", key_field="account_id", name="AccountId"
)
account_id_lookup.aliased_table = account_sf_ids_table
# Calculated values
contact_id_column = getattr(
contact_model, contact_model.__table__.primary_key.columns.keys()[0]
)
account_id_column = getattr(
contact_model, account_id_lookup.get_lookup_key_field(contact_model)
)
account_sf_ids_table = account_id_lookup["aliased_table"]
account_sf_id_column = account_sf_ids_table.columns["sf_id"]
contact_mapping = MappingStep(
sf_object="Contact",
table="contacts",
action=DataOperationType.UPDATE,
fields={
"Id": "sf_id",
"LastName": "LastName",
"IsPersonAccount": "IsPersonAccount",
},
lookups={"AccountId": account_id_lookup},
)
conn = mock.Mock()
conn.execution_options.return_value = conn
query_result = conn.execute.return_value
def get_random_string():
return "".join(
[random.choice(string.ascii_letters + string.digits) for n in range(18)]
)
# Set records to be queried.
chunks = [
[
{
# Table IDs
"id": get_random_string(),
# Salesforce IDs
"sf_id": get_random_string(),
"AccountId": get_random_string(),
}
for i in range(200)
],
[
{
# Table IDs
"id": get_random_string(),
# Salesforce IDs
"sf_id": get_random_string(),
"AccountId": get_random_string(),
}
for i in range(4)
],
]
expected = []
query_result.fetchmany.expected_calls = []
task.sf.query_all.expected_calls = []
for chunk in chunks:
expected.extend([(record["id"], record["sf_id"]) for record in chunk])
query_result.fetchmany.expected_calls.append(mock.call(200))
contact_ids_by_account_sf_id = {
record["AccountId"]: record["id"] for record in chunk
}
task.sf.query_all.expected_calls.append(
mock.call(
"SELECT Id, AccountId FROM Contact WHERE IsPersonAccount = true AND AccountId IN ('{}')".format(
"','".join(contact_ids_by_account_sf_id.keys())
)
)
)
chunks_index = 0
def fetchmany(batch_size):
nonlocal chunks_index
assert 200 == batch_size
# _generate_contact_id_map_for_person_accounts should break if fetchmany returns falsy.
return (
[(record["id"], record["AccountId"]) for record in chunks[chunks_index]]
if chunks_index < len(chunks)
else None
)
def query_all(query):
nonlocal chunks_index
chunk = chunks[chunks_index]
contact_ids_by_account_sf_id = {
record["AccountId"]: record["id"] for record in chunk
}
# query_all is called last; increment to next chunk
chunks_index += 1
assert (
query
== "SELECT Id, AccountId FROM Contact WHERE IsPersonAccount = true AND AccountId IN ('{}')".format(
"','".join(contact_ids_by_account_sf_id.keys())
)
)
return {
"records": [
{"Id": record["sf_id"], "AccountId": record["AccountId"]}
for record in chunk
]
}
conn.execute.return_value.fetchmany.side_effect = fetchmany
task.sf.query_all.side_effect = query_all
# Execute the test.
generator = task._generate_contact_id_map_for_person_accounts(
contact_mapping, account_id_lookup, conn
)
actual = [value for value in generator]
assert expected == actual
# Assert query executed
task.session.query.assert_called_once_with(
contact_id_column, account_sf_id_column
)
task.session.query.filter.assert_called_once()
task.session.query.outerjoin.assert_called_once_with(
account_sf_ids_table,
account_sf_ids_table.columns["id"] == account_id_column,
)
conn.execution_options.assert_called_once_with(stream_results=True)
conn.execute.assert_called_once_with(task.session.query.statement)
# Assert chunks processed
assert len(chunks) == chunks_index
query_result.fetchmany.assert_has_calls(query_result.fetchmany.expected_calls)
task.sf.query_all.assert_has_calls(task.sf.query_all.expected_calls)
@responses.activate
def test_load_memory_usage(self):
responses.add(
method="GET",
url="https://example.com/services/data/v46.0/query/?q=SELECT+Id+FROM+RecordType+WHERE+SObjectType%3D%27Account%27AND+DeveloperName+%3D+%27HH_Account%27+LIMIT+1",
body=json.dumps({"records": [{"Id": "1"}]}),
status=200,
)
base_path = os.path.dirname(__file__)
sql_path = os.path.join(base_path, "testdata.sql")
mapping_path = os.path.join(base_path, self.mapping_file)
with temporary_dir() as d:
tmp_sql_path = os.path.join(d, "testdata.sql")
shutil.copyfile(sql_path, tmp_sql_path)
class NetworklessLoadData(LoadData):
def _query_db(self, mapping):
if mapping.sf_object == "Account":
return FakeQueryResult(
((f"{i}",) for i in range(0, numrecords)), numrecords
)
elif mapping.sf_object == "Contact":
return FakeQueryResult(
(
(f"{i}", "Test☃", "User", "test@example.com", 0)
for i in range(0, numrecords)
),
numrecords,
)
def _init_task(self):
super()._init_task()
task.bulk = FakeBulkAPI()
task = _make_task(
NetworklessLoadData,
{"options": {"sql_path": tmp_sql_path, "mapping": mapping_path}},
)
numrecords = 5000
class FakeQueryResult:
def __init__(self, results, numrecords=None):
self.results = results
if numrecords is None:
numrecords = len(self.results)
self.numrecords = numrecords
def yield_per(self, number):
return self.results
def count(self):
return self.numrecords
mock_describe_calls()
def get_results(self):
return (
DataOperationResult(i, True, None) for i in range(0, numrecords)
)
def _job_state_from_batches(self, job_id):
return DataOperationJobResult(
DataOperationStatus.SUCCESS,
[],
numrecords,
0,
)
MEGABYTE = 2 ** 20
            # FIXME: more analysis about the number below
with mock.patch(
"cumulusci.tasks.bulkdata.step.BulkJobMixin._job_state_from_batches",
_job_state_from_batches,
), mock.patch(
"cumulusci.tasks.bulkdata.step.BulkApiDmlOperation.get_results",
get_results,
), assert_max_memory_usage(
15 * MEGABYTE
):
task()
| SalesforceFoundation/CumulusCI | cumulusci/tasks/bulkdata/tests/test_load.py | Python | bsd-3-clause | 81,450 |
# encoding=utf8
# (c) Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import threading
from unittest import mock
from oslo_concurrency import processutils as putils
from oslo_context import context as context_utils
from os_brick import executor as brick_executor
from os_brick.privileged import rootwrap
from os_brick.tests import base
class TestExecutor(base.TestCase):
def test_default_execute(self):
executor = brick_executor.Executor(root_helper=None)
self.assertEqual(rootwrap.execute, executor._Executor__execute)
def test_none_execute(self):
executor = brick_executor.Executor(root_helper=None, execute=None)
self.assertEqual(rootwrap.execute, executor._Executor__execute)
def test_fake_execute(self):
mock_execute = mock.Mock()
executor = brick_executor.Executor(root_helper=None,
execute=mock_execute)
self.assertEqual(mock_execute, executor._Executor__execute)
@mock.patch('sys.stdin', encoding='UTF-8')
@mock.patch('os_brick.executor.priv_rootwrap.execute')
def test_execute_non_safe_str_exception(self, execute_mock, stdin_mock):
execute_mock.side_effect = putils.ProcessExecutionError(
stdout='España', stderr='Zürich')
executor = brick_executor.Executor(root_helper=None)
exc = self.assertRaises(putils.ProcessExecutionError,
executor._execute)
self.assertEqual('Espa\xf1a', exc.stdout)
self.assertEqual('Z\xfcrich', exc.stderr)
@mock.patch('sys.stdin', encoding='UTF-8')
@mock.patch('os_brick.executor.priv_rootwrap.execute')
def test_execute_non_safe_str(self, execute_mock, stdin_mock):
execute_mock.return_value = ('España', 'Zürich')
executor = brick_executor.Executor(root_helper=None)
stdout, stderr = executor._execute()
self.assertEqual('Espa\xf1a', stdout)
self.assertEqual('Z\xfcrich', stderr)
@mock.patch('sys.stdin', encoding='UTF-8')
@mock.patch('os_brick.executor.priv_rootwrap.execute')
def test_execute_non_safe_bytes_exception(self, execute_mock, stdin_mock):
execute_mock.side_effect = putils.ProcessExecutionError(
stdout=bytes('España', 'utf-8'),
stderr=bytes('Zürich', 'utf-8'))
executor = brick_executor.Executor(root_helper=None)
exc = self.assertRaises(putils.ProcessExecutionError,
executor._execute)
self.assertEqual('Espa\xf1a', exc.stdout)
self.assertEqual('Z\xfcrich', exc.stderr)
@mock.patch('sys.stdin', encoding='UTF-8')
@mock.patch('os_brick.executor.priv_rootwrap.execute')
def test_execute_non_safe_bytes(self, execute_mock, stdin_mock):
execute_mock.return_value = (bytes('España', 'utf-8'),
bytes('Zürich', 'utf-8'))
executor = brick_executor.Executor(root_helper=None)
stdout, stderr = executor._execute()
self.assertEqual('Espa\xf1a', stdout)
self.assertEqual('Z\xfcrich', stderr)
class TestThread(base.TestCase):
def _store_context(self, result):
"""Stores current thread's context in result list."""
result.append(context_utils.get_current())
def _run_threads(self, threads):
for thread in threads:
thread.start()
for thread in threads:
thread.join()
def _do_test(self, thread_class, expected, result=None):
if result is None:
result = []
threads = [thread_class(target=self._store_context, args=[result])
for i in range(3)]
self._run_threads(threads)
self.assertEqual([expected] * len(threads), result)
def test_normal_thread(self):
"""Test normal threads don't inherit parent's context."""
context = context_utils.RequestContext()
context.update_store()
self._do_test(threading.Thread, None)
def test_no_context(self, result=None):
"""Test when parent has no context."""
context_utils._request_store.context = None
self._do_test(brick_executor.Thread, None, result)
def test_with_context(self, result=None):
"""Test that our class actually inherits the context."""
context = context_utils.RequestContext()
context.update_store()
self._do_test(brick_executor.Thread, context, result)
def _run_test(self, test_method, test_args, result):
"""Run one of the normal tests and store the result.
Meant to be run in a different thread, thus the need to store the
result, because by the time the join call completes the test's stack
is no longer available and the exception will have been lost.
"""
try:
test_method(test_args)
result.append(True)
except Exception:
result.append(False)
raise
def test_no_cross_mix(self):
"""Test there's no shared global context between threads."""
result = []
contexts = [[], [], []]
threads = [threading.Thread(target=self._run_test,
args=[self.test_with_context,
contexts[0],
result]),
threading.Thread(target=self._run_test,
args=[self.test_no_context,
contexts[1],
result]),
threading.Thread(target=self._run_test,
args=[self.test_with_context,
contexts[2],
result])]
self._run_threads(threads)
# Check that all tests run without raising an exception
self.assertEqual([True, True, True], result)
        # Check that the contexts were not shared
self.assertNotEqual(contexts[0], contexts[2])
| openstack/os-brick | os_brick/tests/test_executor.py | Python | apache-2.0 | 6,655 |
"""
A small templating language
This implements a small templating language. This language implements
if/elif/else, for/continue/break, expressions, and blocks of Python
code. The syntax is::
{{any expression (function calls etc)}}
{{any expression | filter}}
{{for x in y}}...{{endfor}}
{{if x}}x{{elif y}}y{{else}}z{{endif}}
{{py:x=1}}
{{py:
def foo(bar):
return 'baz'
}}
{{default var = default_value}}
{{# comment}}
You use this with the ``Template`` class or the ``sub`` shortcut.
The ``Template`` class takes the template string and the name of
the template (for errors) and a default namespace. Then (like
``string.Template``) you can call the ``tmpl.substitute(**kw)``
method to make a substitution (or ``tmpl.substitute(a_dict)``).
``sub(content, **kw)`` substitutes the template immediately. You
can use ``__name='tmpl.html'`` to set the name of the template.
If there are syntax errors ``TemplateError`` will be raised.
This copy of tempita was taken from https://github.com/gjhiggins/tempita
with a few changes to remove the six dependency.
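A minimal usage sketch (illustrative only; the import path is an assumption --
adjust it to however this package is importable in your environment)::
    from npy_tempita import Template, sub
    sub('Hi {{name}}!', name='World')          # -> 'Hi World!'
    tmpl = Template('{{for n in nums}}{{n}},{{endfor}}', name='example.txt')
    tmpl.substitute(nums=[1, 2, 3])            # -> '1,2,3,'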
"""
from __future__ import absolute_import, division, print_function
import re
import sys
try:
from urllib.parse import quote as url_quote
from io import StringIO
from html import escape as html_escape
except ImportError:
from urllib import quote as url_quote
from cStringIO import StringIO
from cgi import escape as html_escape
import os
import tokenize
from ._looper import looper
from .compat3 import (
PY3, bytes, basestring_, next, is_unicode, coerce_text, iteritems)
__all__ = ['TemplateError', 'Template', 'sub', 'HTMLTemplate',
'sub_html', 'html', 'bunch']
in_re = re.compile(r'\s+in\s+')
var_re = re.compile(r'^[a-z_][a-z0-9_]*$', re.I)
class TemplateError(Exception):
"""Exception raised while parsing a template
"""
def __init__(self, message, position, name=None):
Exception.__init__(self, message)
self.position = position
self.name = name
def __str__(self):
msg = ' '.join(self.args)
if self.position:
msg = '%s at line %s column %s' % (
msg, self.position[0], self.position[1])
if self.name:
msg += ' in %s' % self.name
return msg
class _TemplateContinue(Exception):
pass
class _TemplateBreak(Exception):
pass
def get_file_template(name, from_template):
path = os.path.join(os.path.dirname(from_template.name), name)
return from_template.__class__.from_filename(
path, namespace=from_template.namespace,
get_template=from_template.get_template)
class Template(object):
default_namespace = {
'start_braces': '{{',
'end_braces': '}}',
'looper': looper,
}
default_encoding = 'utf8'
default_inherit = None
def __init__(self, content, name=None, namespace=None, stacklevel=None,
get_template=None, default_inherit=None, line_offset=0,
delimeters=None):
self.content = content
# set delimeters
if delimeters is None:
delimeters = (self.default_namespace['start_braces'],
self.default_namespace['end_braces'])
else:
assert len(delimeters) == 2 and all(
[isinstance(delimeter, basestring_)
for delimeter in delimeters])
self.default_namespace = self.__class__.default_namespace.copy()
self.default_namespace['start_braces'] = delimeters[0]
self.default_namespace['end_braces'] = delimeters[1]
self.delimeters = delimeters
self._unicode = is_unicode(content)
if name is None and stacklevel is not None:
try:
caller = sys._getframe(stacklevel)
except ValueError:
pass
else:
globals = caller.f_globals
lineno = caller.f_lineno
if '__file__' in globals:
name = globals['__file__']
if name.endswith('.pyc') or name.endswith('.pyo'):
name = name[:-1]
elif '__name__' in globals:
name = globals['__name__']
else:
name = '<string>'
if lineno:
name += ':%s' % lineno
self.name = name
self._parsed = parse(
content, name=name, line_offset=line_offset,
delimeters=self.delimeters)
if namespace is None:
namespace = {}
self.namespace = namespace
self.get_template = get_template
if default_inherit is not None:
self.default_inherit = default_inherit
def from_filename(cls, filename, namespace=None, encoding=None,
default_inherit=None, get_template=get_file_template):
f = open(filename, 'rb')
c = f.read()
f.close()
if encoding:
c = c.decode(encoding)
elif PY3:
c = c.decode('latin-1')
return cls(content=c, name=filename, namespace=namespace,
default_inherit=default_inherit, get_template=get_template)
from_filename = classmethod(from_filename)
def __repr__(self):
return '<%s %s name=%r>' % (
self.__class__.__name__,
hex(id(self))[2:], self.name)
def substitute(self, *args, **kw):
if args:
if kw:
raise TypeError(
"You can only give positional *or* keyword arguments")
if len(args) > 1:
raise TypeError(
"You can only give one positional argument")
if not hasattr(args[0], 'items'):
                raise TypeError(
                    "If you pass in a single argument, you must pass in a "
                    "dict-like object (with a .items() method); you gave %r"
                    % (args[0],))
kw = args[0]
ns = kw
ns['__template_name__'] = self.name
if self.namespace:
ns.update(self.namespace)
result, defs, inherit = self._interpret(ns)
if not inherit:
inherit = self.default_inherit
if inherit:
result = self._interpret_inherit(result, defs, inherit, ns)
return result
def _interpret(self, ns):
# __traceback_hide__ = True
parts = []
defs = {}
self._interpret_codes(self._parsed, ns, out=parts, defs=defs)
if '__inherit__' in defs:
inherit = defs.pop('__inherit__')
else:
inherit = None
return ''.join(parts), defs, inherit
def _interpret_inherit(self, body, defs, inherit_template, ns):
# __traceback_hide__ = True
if not self.get_template:
raise TemplateError(
'You cannot use inheritance without passing in get_template',
position=None, name=self.name)
templ = self.get_template(inherit_template, self)
self_ = TemplateObject(self.name)
for name, value in iteritems(defs):
setattr(self_, name, value)
self_.body = body
ns = ns.copy()
ns['self'] = self_
return templ.substitute(ns)
def _interpret_codes(self, codes, ns, out, defs):
# __traceback_hide__ = True
for item in codes:
if isinstance(item, basestring_):
out.append(item)
else:
self._interpret_code(item, ns, out, defs)
def _interpret_code(self, code, ns, out, defs):
# __traceback_hide__ = True
name, pos = code[0], code[1]
if name == 'py':
self._exec(code[2], ns, pos)
elif name == 'continue':
raise _TemplateContinue()
elif name == 'break':
raise _TemplateBreak()
elif name == 'for':
vars, expr, content = code[2], code[3], code[4]
expr = self._eval(expr, ns, pos)
self._interpret_for(vars, expr, content, ns, out, defs)
elif name == 'cond':
parts = code[2:]
self._interpret_if(parts, ns, out, defs)
elif name == 'expr':
parts = code[2].split('|')
base = self._eval(parts[0], ns, pos)
for part in parts[1:]:
func = self._eval(part, ns, pos)
base = func(base)
out.append(self._repr(base, pos))
elif name == 'default':
var, expr = code[2], code[3]
if var not in ns:
result = self._eval(expr, ns, pos)
ns[var] = result
elif name == 'inherit':
expr = code[2]
value = self._eval(expr, ns, pos)
defs['__inherit__'] = value
elif name == 'def':
name = code[2]
signature = code[3]
parts = code[4]
ns[name] = defs[name] = TemplateDef(
self, name, signature, body=parts, ns=ns, pos=pos)
elif name == 'comment':
return
else:
assert 0, "Unknown code: %r" % name
def _interpret_for(self, vars, expr, content, ns, out, defs):
# __traceback_hide__ = True
for item in expr:
if len(vars) == 1:
ns[vars[0]] = item
else:
if len(vars) != len(item):
raise ValueError(
'Need %i items to unpack (got %i items)'
% (len(vars), len(item)))
for name, value in zip(vars, item):
ns[name] = value
try:
self._interpret_codes(content, ns, out, defs)
except _TemplateContinue:
continue
except _TemplateBreak:
break
def _interpret_if(self, parts, ns, out, defs):
# __traceback_hide__ = True
# @@: if/else/else gets through
for part in parts:
assert not isinstance(part, basestring_)
name, pos = part[0], part[1]
if name == 'else':
result = True
else:
result = self._eval(part[2], ns, pos)
if result:
self._interpret_codes(part[3], ns, out, defs)
break
def _eval(self, code, ns, pos):
# __traceback_hide__ = True
try:
try:
value = eval(code, self.default_namespace, ns)
except SyntaxError as e:
raise SyntaxError(
'invalid syntax in expression: %s' % code)
return value
except:
exc_info = sys.exc_info()
e = exc_info[1]
if getattr(e, 'args', None):
arg0 = e.args[0]
else:
arg0 = coerce_text(e)
e.args = (self._add_line_info(arg0, pos),)
if PY3:
raise (e)
else:
raise (exc_info[1], e, exc_info[2])
def _exec(self, code, ns, pos):
# __traceback_hide__ = True
try:
exec (code, self.default_namespace, ns)
except:
exc_info = sys.exc_info()
e = exc_info[1]
if e.args:
e.args = (self._add_line_info(e.args[0], pos),)
else:
e.args = (self._add_line_info(None, pos),)
if PY3:
raise (e)
else:
raise (exc_info[1], e, exc_info[2])
def _repr(self, value, pos):
# __traceback_hide__ = True
try:
if value is None:
return ''
if self._unicode:
value = str(value)
if not is_unicode(value):
value = value.decode('utf-8')
else:
if not isinstance(value, basestring_):
value = coerce_text(value)
if (is_unicode(value) and self.default_encoding):
value = value.encode(self.default_encoding)
except:
exc_info = sys.exc_info()
e = exc_info[1]
e.args = (self._add_line_info(e.args[0], pos),)
if PY3:
raise (e)
else:
raise (exc_info[1], e, exc_info[2])
else:
if self._unicode and isinstance(value, bytes):
if not self.default_encoding:
raise UnicodeDecodeError(
'Cannot decode bytes value %r into unicode '
'(no default_encoding provided)' % value)
try:
value = value.decode(self.default_encoding)
except UnicodeDecodeError as e:
raise UnicodeDecodeError(
e.encoding,
e.object,
e.start,
e.end,
e.reason + ' in string %r' % value)
elif not self._unicode and is_unicode(value):
if not self.default_encoding:
raise UnicodeEncodeError(
'Cannot encode unicode value %r into bytes '
'(no default_encoding provided)' % value)
value = value.encode(self.default_encoding)
return value
def _add_line_info(self, msg, pos):
msg = "%s at line %s column %s" % (
msg, pos[0], pos[1])
if self.name:
msg += " in file %s" % self.name
return msg
def sub(content, delimeters=None, **kw):
name = kw.get('__name')
tmpl = Template(content, name=name, delimeters=delimeters)
return tmpl.substitute(kw)
def paste_script_template_renderer(content, vars, filename=None):
tmpl = Template(content, name=filename)
return tmpl.substitute(vars)
class bunch(dict):
def __init__(self, **kw):
for name, value in iteritems(kw):
setattr(self, name, value)
def __setattr__(self, name, value):
self[name] = value
def __getattr__(self, name):
try:
return self[name]
except KeyError:
raise AttributeError(name)
def __getitem__(self, key):
if 'default' in self:
try:
return dict.__getitem__(self, key)
except KeyError:
return dict.__getitem__(self, 'default')
else:
return dict.__getitem__(self, key)
def __repr__(self):
items = [
(k, v) for k, v in iteritems(self)]
items.sort()
return '<%s %s>' % (
self.__class__.__name__,
' '.join(['%s=%r' % (k, v) for k, v in items]))
############################################################
# HTML Templating
############################################################
class html(object):
def __init__(self, value):
self.value = value
def __str__(self):
return self.value
def __html__(self):
return self.value
def __repr__(self):
return '<%s %r>' % (
self.__class__.__name__, self.value)
def html_quote(value, force=True):
if not force and hasattr(value, '__html__'):
return value.__html__()
if value is None:
return ''
if not isinstance(value, basestring_):
value = coerce_text(value)
if sys.version >= "3" and isinstance(value, bytes):
value = html_escape(value.decode('latin1'), 1)
value = value.encode('latin1')
else:
value = html_escape(value, 1)
if sys.version < "3":
if is_unicode(value):
value = value.encode('ascii', 'xmlcharrefreplace')
return value
def url(v):
v = coerce_text(v)
if is_unicode(v):
v = v.encode('utf8')
return url_quote(v)
def attr(**kw):
kw = list(iteritems(kw))
kw.sort()
parts = []
for name, value in kw:
if value is None:
continue
if name.endswith('_'):
name = name[:-1]
parts.append('%s="%s"' % (html_quote(name), html_quote(value)))
return html(' '.join(parts))
class HTMLTemplate(Template):
default_namespace = Template.default_namespace.copy()
default_namespace.update(dict(
html=html,
attr=attr,
url=url,
html_quote=html_quote))
def _repr(self, value, pos):
if hasattr(value, '__html__'):
value = value.__html__()
quote = False
else:
quote = True
plain = Template._repr(self, value, pos)
if quote:
return html_quote(plain)
else:
return plain
def sub_html(content, **kw):
name = kw.get('__name')
tmpl = HTMLTemplate(content, name=name)
return tmpl.substitute(kw)
class TemplateDef(object):
def __init__(self, template, func_name, func_signature,
body, ns, pos, bound_self=None):
self._template = template
self._func_name = func_name
self._func_signature = func_signature
self._body = body
self._ns = ns
self._pos = pos
self._bound_self = bound_self
def __repr__(self):
return '<tempita function %s(%s) at %s:%s>' % (
self._func_name, self._func_signature,
self._template.name, self._pos)
def __str__(self):
return self()
def __call__(self, *args, **kw):
values = self._parse_signature(args, kw)
ns = self._ns.copy()
ns.update(values)
if self._bound_self is not None:
ns['self'] = self._bound_self
out = []
subdefs = {}
self._template._interpret_codes(self._body, ns, out, subdefs)
return ''.join(out)
def __get__(self, obj, type=None):
if obj is None:
return self
return self.__class__(
self._template, self._func_name, self._func_signature,
self._body, self._ns, self._pos, bound_self=obj)
def _parse_signature(self, args, kw):
values = {}
sig_args, var_args, var_kw, defaults = self._func_signature
extra_kw = {}
for name, value in iteritems(kw):
if not var_kw and name not in sig_args:
raise TypeError(
'Unexpected argument %s' % name)
if name in sig_args:
                values[name] = value
else:
extra_kw[name] = value
args = list(args)
sig_args = list(sig_args)
while args:
while sig_args and sig_args[0] in values:
sig_args.pop(0)
if sig_args:
name = sig_args.pop(0)
values[name] = args.pop(0)
elif var_args:
values[var_args] = tuple(args)
break
else:
raise TypeError(
                    'Extra positional arguments: %s'
% ', '.join(repr(v) for v in args))
for name, value_expr in iteritems(defaults):
if name not in values:
values[name] = self._template._eval(
value_expr, self._ns, self._pos)
for name in sig_args:
if name not in values:
raise TypeError(
'Missing argument: %s' % name)
if var_kw:
values[var_kw] = extra_kw
return values
class TemplateObject(object):
def __init__(self, name):
self.__name = name
self.get = TemplateObjectGetter(self)
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.__name)
class TemplateObjectGetter(object):
def __init__(self, template_obj):
self.__template_obj = template_obj
def __getattr__(self, attr):
return getattr(self.__template_obj, attr, Empty)
def __repr__(self):
return '<%s around %r>' % (
self.__class__.__name__, self.__template_obj)
class _Empty(object):
def __call__(self, *args, **kw):
return self
def __str__(self):
return ''
def __repr__(self):
return 'Empty'
def __unicode__(self):
return '' if PY3 else u''
def __iter__(self):
return iter(())
def __bool__(self):
return False
if sys.version < "3":
__nonzero__ = __bool__
Empty = _Empty()
del _Empty
############################################################
# Lexing and Parsing
############################################################
def lex(s, name=None, trim_whitespace=True, line_offset=0, delimeters=None):
if delimeters is None:
delimeters = (Template.default_namespace['start_braces'],
Template.default_namespace['end_braces'])
in_expr = False
chunks = []
last = 0
last_pos = (line_offset + 1, 1)
token_re = re.compile(r'%s|%s' % (re.escape(delimeters[0]),
re.escape(delimeters[1])))
for match in token_re.finditer(s):
expr = match.group(0)
pos = find_position(s, match.end(), last, last_pos)
if expr == delimeters[0] and in_expr:
raise TemplateError('%s inside expression' % delimeters[0],
position=pos,
name=name)
elif expr == delimeters[1] and not in_expr:
raise TemplateError('%s outside expression' % delimeters[1],
position=pos,
name=name)
if expr == delimeters[0]:
part = s[last:match.start()]
if part:
chunks.append(part)
in_expr = True
else:
chunks.append((s[last:match.start()], last_pos))
in_expr = False
last = match.end()
last_pos = pos
if in_expr:
raise TemplateError('No %s to finish last expression' % delimeters[1],
name=name, position=last_pos)
part = s[last:]
if part:
chunks.append(part)
if trim_whitespace:
chunks = trim_lex(chunks)
return chunks
lex.__doc__ = """
Lex a string into chunks:
>>> lex('hey')
['hey']
>>> lex('hey {{you}}')
['hey ', ('you', (1, 7))]
>>> lex('hey {{')
Traceback (most recent call last):
...
tempita.TemplateError: No }} to finish last expression at line 1 column 7
>>> lex('hey }}')
Traceback (most recent call last):
...
tempita.TemplateError: }} outside expression at line 1 column 7
>>> lex('hey {{ {{')
Traceback (most recent call last):
...
tempita.TemplateError: {{ inside expression at line 1 column 10
""" if PY3 else """
Lex a string into chunks:
>>> lex('hey')
['hey']
>>> lex('hey {{you}}')
['hey ', ('you', (1, 7))]
>>> lex('hey {{')
Traceback (most recent call last):
...
TemplateError: No }} to finish last expression at line 1 column 7
>>> lex('hey }}')
Traceback (most recent call last):
...
TemplateError: }} outside expression at line 1 column 7
>>> lex('hey {{ {{')
Traceback (most recent call last):
...
TemplateError: {{ inside expression at line 1 column 10
"""
statement_re = re.compile(r'^(?:if |elif |for |def |inherit |default |py:)')
single_statements = ['else', 'endif', 'endfor', 'enddef', 'continue', 'break']
trail_whitespace_re = re.compile(r'\n\r?[\t ]*$')
lead_whitespace_re = re.compile(r'^[\t ]*\n')
def trim_lex(tokens):
last_trim = None
for i in range(len(tokens)):
current = tokens[i]
if isinstance(tokens[i], basestring_):
# we don't trim this
continue
item = current[0]
if not statement_re.search(item) and item not in single_statements:
continue
if not i:
prev = ''
else:
prev = tokens[i - 1]
if i + 1 >= len(tokens):
next_chunk = ''
else:
next_chunk = tokens[i + 1]
if (not
isinstance(next_chunk, basestring_) or
not isinstance(prev, basestring_)):
continue
prev_ok = not prev or trail_whitespace_re.search(prev)
if i == 1 and not prev.strip():
prev_ok = True
if last_trim is not None and last_trim + 2 == i and not prev.strip():
prev_ok = 'last'
if (prev_ok and (not next_chunk or lead_whitespace_re.search(
next_chunk) or (
i == len(tokens) - 2 and not next_chunk.strip()))):
if prev:
if ((i == 1 and not prev.strip()) or prev_ok == 'last'):
tokens[i - 1] = ''
else:
m = trail_whitespace_re.search(prev)
# +1 to leave the leading \n on:
prev = prev[:m.start() + 1]
tokens[i - 1] = prev
if next_chunk:
last_trim = i
if i == len(tokens) - 2 and not next_chunk.strip():
tokens[i + 1] = ''
else:
m = lead_whitespace_re.search(next_chunk)
next_chunk = next_chunk[m.end():]
tokens[i + 1] = next_chunk
return tokens
trim_lex.__doc__ = r"""
Takes a lexed set of tokens, and removes whitespace when there is
a directive on a line by itself:
>>> tokens = lex('{{if x}}\nx\n{{endif}}\ny', trim_whitespace=False)
>>> tokens
[('if x', (1, 3)), '\nx\n', ('endif', (3, 3)), '\ny']
>>> trim_lex(tokens)
[('if x', (1, 3)), 'x\n', ('endif', (3, 3)), 'y']
""" if PY3 else r"""
Takes a lexed set of tokens, and removes whitespace when there is
a directive on a line by itself:
>>> tokens = lex('{{if x}}\nx\n{{endif}}\ny', trim_whitespace=False)
>>> tokens
[('if x', (1, 3)), '\nx\n', ('endif', (3, 3)), '\ny']
>>> trim_lex(tokens)
[('if x', (1, 3)), 'x\n', ('endif', (3, 3)), 'y']
"""
def find_position(string, index, last_index, last_pos):
"""
Given a string and index, return (line, column)
"""
lines = string.count('\n', last_index, index)
if lines > 0:
column = index - string.rfind('\n', last_index, index)
else:
column = last_pos[1] + (index - last_index)
return (last_pos[0] + lines, column)
def parse(s, name=None, line_offset=0, delimeters=None):
if delimeters is None:
delimeters = (Template.default_namespace['start_braces'],
Template.default_namespace['end_braces'])
tokens = lex(s, name=name, line_offset=line_offset, delimeters=delimeters)
result = []
while tokens:
next_chunk, tokens = parse_expr(tokens, name)
result.append(next_chunk)
return result
parse.__doc__ = r"""
Parses a string into a kind of AST
>>> parse('{{x}}')
[('expr', (1, 3), 'x')]
>>> parse('foo')
['foo']
>>> parse('{{if x}}test{{endif}}')
[('cond', (1, 3), ('if', (1, 3), 'x', ['test']))]
>>> parse(
... 'series->{{for x in y}}x={{x}}{{endfor}}'
... ) #doctest: +NORMALIZE_WHITESPACE
['series->',
('for', (1, 11), ('x',), 'y', ['x=', ('expr', (1, 27), 'x')])]
>>> parse('{{for x, y in z:}}{{continue}}{{endfor}}')
[('for', (1, 3), ('x', 'y'), 'z', [('continue', (1, 21))])]
>>> parse('{{py:x=1}}')
[('py', (1, 3), 'x=1')]
>>> parse(
... '{{if x}}a{{elif y}}b{{else}}c{{endif}}'
... ) #doctest: +NORMALIZE_WHITESPACE
[('cond', (1, 3), ('if', (1, 3), 'x', ['a']),
('elif', (1, 12), 'y', ['b']), ('else', (1, 23), None, ['c']))]
Some exceptions::
>>> parse('{{continue}}')
Traceback (most recent call last):
...
tempita.TemplateError: continue outside of for loop at line 1 column 3
>>> parse('{{if x}}foo')
Traceback (most recent call last):
...
tempita.TemplateError: No {{endif}} at line 1 column 3
>>> parse('{{else}}')
Traceback (most recent call last):
...
tempita.TemplateError: else outside of an if block at line 1 column 3
>>> parse('{{if x}}{{for x in y}}{{endif}}{{endfor}}')
Traceback (most recent call last):
...
tempita.TemplateError: Unexpected endif at line 1 column 25
>>> parse('{{if}}{{endif}}')
Traceback (most recent call last):
...
tempita.TemplateError: if with no expression at line 1 column 3
>>> parse('{{for x y}}{{endfor}}')
Traceback (most recent call last):
...
tempita.TemplateError: Bad for (no "in") in 'x y' at line 1 column 3
>>> parse('{{py:x=1\ny=2}}') #doctest: +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
tempita.TemplateError: Multi-line py blocks must start
with a newline at line 1 column 3
""" if PY3 else r"""
Parses a string into a kind of AST
>>> parse('{{x}}')
[('expr', (1, 3), 'x')]
>>> parse('foo')
['foo']
>>> parse('{{if x}}test{{endif}}')
[('cond', (1, 3), ('if', (1, 3), 'x', ['test']))]
>>> parse(
... 'series->{{for x in y}}x={{x}}{{endfor}}'
... ) #doctest: +NORMALIZE_WHITESPACE
['series->',
('for', (1, 11), ('x',), 'y', ['x=', ('expr', (1, 27), 'x')])]
>>> parse('{{for x, y in z:}}{{continue}}{{endfor}}')
[('for', (1, 3), ('x', 'y'), 'z', [('continue', (1, 21))])]
>>> parse('{{py:x=1}}')
[('py', (1, 3), 'x=1')]
>>> parse(
... '{{if x}}a{{elif y}}b{{else}}c{{endif}}'
... ) #doctest: +NORMALIZE_WHITESPACE
[('cond', (1, 3), ('if', (1, 3), 'x', ['a']),
('elif', (1, 12), 'y', ['b']), ('else', (1, 23), None, ['c']))]
Some exceptions::
>>> parse('{{continue}}')
Traceback (most recent call last):
...
TemplateError: continue outside of for loop at line 1 column 3
>>> parse('{{if x}}foo')
Traceback (most recent call last):
...
TemplateError: No {{endif}} at line 1 column 3
>>> parse('{{else}}')
Traceback (most recent call last):
...
TemplateError: else outside of an if block at line 1 column 3
>>> parse('{{if x}}{{for x in y}}{{endif}}{{endfor}}')
Traceback (most recent call last):
...
TemplateError: Unexpected endif at line 1 column 25
>>> parse('{{if}}{{endif}}')
Traceback (most recent call last):
...
TemplateError: if with no expression at line 1 column 3
>>> parse('{{for x y}}{{endfor}}')
Traceback (most recent call last):
...
TemplateError: Bad for (no "in") in 'x y' at line 1 column 3
>>> parse('{{py:x=1\ny=2}}') #doctest: +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
TemplateError: Multi-line py blocks must start
with a newline at line 1 column 3
"""
def parse_expr(tokens, name, context=()):
if isinstance(tokens[0], basestring_):
return tokens[0], tokens[1:]
expr, pos = tokens[0]
expr = expr.strip()
if expr.startswith('py:'):
expr = expr[3:].lstrip(' \t')
if expr.startswith('\n') or expr.startswith('\r'):
expr = expr.lstrip('\r\n')
if '\r' in expr:
expr = expr.replace('\r\n', '\n')
expr = expr.replace('\r', '')
expr += '\n'
else:
if '\n' in expr:
raise TemplateError(
'Multi-line py blocks must start with a newline',
position=pos, name=name)
return ('py', pos, expr), tokens[1:]
elif expr in ('continue', 'break'):
if 'for' not in context:
raise TemplateError(
'continue outside of for loop',
position=pos, name=name)
return (expr, pos), tokens[1:]
elif expr.startswith('if '):
return parse_cond(tokens, name, context)
elif (expr.startswith('elif ') or expr == 'else'):
raise TemplateError(
'%s outside of an if block' % expr.split()[0],
position=pos, name=name)
elif expr in ('if', 'elif', 'for'):
raise TemplateError(
'%s with no expression' % expr,
position=pos, name=name)
elif expr in ('endif', 'endfor', 'enddef'):
raise TemplateError(
'Unexpected %s' % expr,
position=pos, name=name)
elif expr.startswith('for '):
return parse_for(tokens, name, context)
elif expr.startswith('default '):
return parse_default(tokens, name, context)
elif expr.startswith('inherit '):
return parse_inherit(tokens, name, context)
elif expr.startswith('def '):
return parse_def(tokens, name, context)
elif expr.startswith('#'):
return ('comment', pos, tokens[0][0]), tokens[1:]
return ('expr', pos, tokens[0][0]), tokens[1:]
def parse_cond(tokens, name, context):
start = tokens[0][1]
pieces = []
context = context + ('if',)
while 1:
if not tokens:
raise TemplateError(
'Missing {{endif}}',
position=start, name=name)
if (isinstance(tokens[0], tuple) and tokens[0][0] == 'endif'):
return ('cond', start) + tuple(pieces), tokens[1:]
next_chunk, tokens = parse_one_cond(tokens, name, context)
pieces.append(next_chunk)
def parse_one_cond(tokens, name, context):
(first, pos), tokens = tokens[0], tokens[1:]
content = []
if first.endswith(':'):
first = first[:-1]
if first.startswith('if '):
part = ('if', pos, first[3:].lstrip(), content)
elif first.startswith('elif '):
part = ('elif', pos, first[5:].lstrip(), content)
elif first == 'else':
part = ('else', pos, None, content)
else:
assert 0, "Unexpected token %r at %s" % (first, pos)
while 1:
if not tokens:
raise TemplateError(
'No {{endif}}',
position=pos, name=name)
if (isinstance(tokens[0], tuple) and (
tokens[0][0] == 'endif' or tokens[0][0].startswith(
'elif ') or tokens[0][0] == 'else')):
return part, tokens
next_chunk, tokens = parse_expr(tokens, name, context)
content.append(next_chunk)
def parse_for(tokens, name, context):
first, pos = tokens[0]
tokens = tokens[1:]
context = ('for',) + context
content = []
assert first.startswith('for ')
if first.endswith(':'):
first = first[:-1]
first = first[3:].strip()
match = in_re.search(first)
if not match:
raise TemplateError(
'Bad for (no "in") in %r' % first,
position=pos, name=name)
vars = first[:match.start()]
if '(' in vars:
raise TemplateError(
'You cannot have () in the variable section of a for loop (%r)'
% vars, position=pos, name=name)
vars = tuple([
v.strip() for v in first[:match.start()].split(',')
if v.strip()])
expr = first[match.end():]
while 1:
if not tokens:
raise TemplateError(
'No {{endfor}}',
position=pos, name=name)
if (isinstance(tokens[0], tuple) and tokens[0][0] == 'endfor'):
return ('for', pos, vars, expr, content), tokens[1:]
next_chunk, tokens = parse_expr(tokens, name, context)
content.append(next_chunk)
def parse_default(tokens, name, context):
first, pos = tokens[0]
assert first.startswith('default ')
first = first.split(None, 1)[1]
parts = first.split('=', 1)
if len(parts) == 1:
raise TemplateError(
"Expression must be {{default var=value}}; no = found in %r" %
first, position=pos, name=name)
var = parts[0].strip()
if ',' in var:
raise TemplateError(
"{{default x, y = ...}} is not supported",
position=pos, name=name)
if not var_re.search(var):
raise TemplateError(
"Not a valid variable name for {{default}}: %r"
% var, position=pos, name=name)
expr = parts[1].strip()
return ('default', pos, var, expr), tokens[1:]
def parse_inherit(tokens, name, context):
first, pos = tokens[0]
assert first.startswith('inherit ')
expr = first.split(None, 1)[1]
return ('inherit', pos, expr), tokens[1:]
def parse_def(tokens, name, context):
first, start = tokens[0]
tokens = tokens[1:]
assert first.startswith('def ')
first = first.split(None, 1)[1]
if first.endswith(':'):
first = first[:-1]
if '(' not in first:
func_name = first
sig = ((), None, None, {})
elif not first.endswith(')'):
raise TemplateError("Function definition doesn't end with ): %s" %
first, position=start, name=name)
else:
first = first[:-1]
func_name, sig_text = first.split('(', 1)
sig = parse_signature(sig_text, name, start)
context = context + ('def',)
content = []
while 1:
if not tokens:
raise TemplateError(
'Missing {{enddef}}',
position=start, name=name)
if (isinstance(tokens[0], tuple) and tokens[0][0] == 'enddef'):
return ('def', start, func_name, sig, content), tokens[1:]
next_chunk, tokens = parse_expr(tokens, name, context)
content.append(next_chunk)
def parse_signature(sig_text, name, pos):
tokens = tokenize.generate_tokens(StringIO(sig_text).readline)
sig_args = []
var_arg = None
var_kw = None
defaults = {}
def get_token(pos=False):
try:
tok_type, tok_string, (srow, scol), (erow, ecol), line = next(
tokens)
except StopIteration:
return tokenize.ENDMARKER, ''
if pos:
return tok_type, tok_string, (srow, scol), (erow, ecol)
else:
return tok_type, tok_string
while 1:
var_arg_type = None
tok_type, tok_string = get_token()
if tok_type == tokenize.ENDMARKER:
break
if tok_type == tokenize.OP and (
tok_string == '*' or tok_string == '**'):
var_arg_type = tok_string
tok_type, tok_string = get_token()
if tok_type != tokenize.NAME:
raise TemplateError('Invalid signature: (%s)' % sig_text,
position=pos, name=name)
var_name = tok_string
tok_type, tok_string = get_token()
if tok_type == tokenize.ENDMARKER or (
tok_type == tokenize.OP and tok_string == ','):
if var_arg_type == '*':
var_arg = var_name
elif var_arg_type == '**':
var_kw = var_name
else:
sig_args.append(var_name)
if tok_type == tokenize.ENDMARKER:
break
continue
if var_arg_type is not None:
raise TemplateError('Invalid signature: (%s)' % sig_text,
position=pos, name=name)
if tok_type == tokenize.OP and tok_string == '=':
nest_type = None
unnest_type = None
nest_count = 0
start_pos = end_pos = None
parts = []
while 1:
tok_type, tok_string, s, e = get_token(True)
if start_pos is None:
start_pos = s
end_pos = e
if tok_type == tokenize.ENDMARKER and nest_count:
raise TemplateError('Invalid signature: (%s)' % sig_text,
position=pos, name=name)
if (not nest_count and
(tok_type == tokenize.ENDMARKER or
(tok_type == tokenize.OP and tok_string == ','))):
default_expr = isolate_expression(
sig_text, start_pos, end_pos)
defaults[var_name] = default_expr
sig_args.append(var_name)
break
parts.append((tok_type, tok_string))
if nest_count \
and tok_type == tokenize.OP \
and tok_string == nest_type:
nest_count += 1
elif nest_count \
and tok_type == tokenize.OP \
and tok_string == unnest_type:
nest_count -= 1
if not nest_count:
nest_type = unnest_type = None
elif not nest_count \
and tok_type == tokenize.OP \
and tok_string in ('(', '[', '{'):
nest_type = tok_string
nest_count = 1
unnest_type = {'(': ')', '[': ']', '{': '}'}[nest_type]
return sig_args, var_arg, var_kw, defaults
def isolate_expression(string, start_pos, end_pos):
srow, scol = start_pos
srow -= 1
erow, ecol = end_pos
erow -= 1
lines = string.splitlines(True)
if srow == erow:
return lines[srow][scol:ecol]
parts = [lines[srow][scol:]]
parts.extend(lines[srow + 1:erow])
if erow < len(lines):
# It'll sometimes give (end_row_past_finish, 0)
parts.append(lines[erow][:ecol])
return ''.join(parts)
_fill_command_usage = """\
%prog [OPTIONS] TEMPLATE arg=value
Use py:arg=value to set a Python value; otherwise all values are
strings.
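For example (hypothetical invocation; the file names are illustrative):
  %prog page.html.tmpl title="Hello" py:count=3 --output page.html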
"""
def fill_command(args=None):
import sys
import optparse
import pkg_resources
import os
if args is None:
args = sys.argv[1:]
dist = pkg_resources.get_distribution('Paste')
parser = optparse.OptionParser(
version=coerce_text(dist),
usage=_fill_command_usage)
parser.add_option(
'-o', '--output',
dest='output',
metavar="FILENAME",
help="File to write output to (default stdout)")
parser.add_option(
'--html',
dest='use_html',
action='store_true',
help="Use HTML style filling (including automatic HTML quoting)")
parser.add_option(
'--env',
dest='use_env',
action='store_true',
help="Put the environment in as top-level variables")
options, args = parser.parse_args(args)
if len(args) < 1:
print('You must give a template filename')
sys.exit(2)
template_name = args[0]
args = args[1:]
vars = {}
if options.use_env:
vars.update(os.environ)
for value in args:
if '=' not in value:
print('Bad argument: %r' % value)
sys.exit(2)
name, value = value.split('=', 1)
if name.startswith('py:'):
            name = name[3:]
value = eval(value)
vars[name] = value
if template_name == '-':
template_content = sys.stdin.read()
template_name = '<stdin>'
else:
        f = open(template_name, 'rb')
        template_content = f.read().decode('latin-1')
        f.close()
if options.use_html:
TemplateClass = HTMLTemplate
else:
TemplateClass = Template
template = TemplateClass(template_content, name=template_name)
result = template.substitute(vars)
if options.output:
        f = open(options.output, 'w')
f.write(result)
f.close()
else:
sys.stdout.write(result)
if __name__ == '__main__':
fill_command()
| DailyActie/Surrogate-Model | 01-codes/numpy-master/tools/npy_tempita/__init__.py | Python | mit | 44,114 |
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests for the module module, which contains Module and related classes."""
import os
import unittest
from py_vulcanize import fake_fs
from py_vulcanize import module
from py_vulcanize import resource_loader
from py_vulcanize import project as project_module
class ModuleIntegrationTests(unittest.TestCase):
def test_module(self):
fs = fake_fs.FakeFS()
fs.AddFile('/src/x.html', """
<!DOCTYPE html>
<link rel="import" href="/y.html">
<link rel="import" href="/z.html">
<script>
'use strict';
</script>
""")
fs.AddFile('/src/y.html', """
<!DOCTYPE html>
<link rel="import" href="/z.html">
""")
fs.AddFile('/src/z.html', """
<!DOCTYPE html>
""")
fs.AddFile('/src/py_vulcanize.html', '<!DOCTYPE html>')
with fs:
project = project_module.Project([os.path.normpath('/src/')])
loader = resource_loader.ResourceLoader(project)
x_module = loader.LoadModule('x')
self.assertEquals([loader.loaded_modules['y'],
loader.loaded_modules['z']],
x_module.dependent_modules)
already_loaded_set = set()
load_sequence = []
x_module.ComputeLoadSequenceRecursive(load_sequence, already_loaded_set)
self.assertEquals([loader.loaded_modules['z'],
loader.loaded_modules['y'],
x_module],
load_sequence)
def testBasic(self):
fs = fake_fs.FakeFS()
fs.AddFile('/x/src/my_module.html', """
<!DOCTYPE html>
<link rel="import" href="/py_vulcanize/foo.html">
});
""")
fs.AddFile('/x/py_vulcanize/foo.html', """
<!DOCTYPE html>
});
""")
project = project_module.Project([os.path.normpath('/x')])
loader = resource_loader.ResourceLoader(project)
with fs:
my_module = loader.LoadModule(module_name='src.my_module')
dep_names = [x.name for x in my_module.dependent_modules]
self.assertEquals(['py_vulcanize.foo'], dep_names)
def testDepsExceptionContext(self):
fs = fake_fs.FakeFS()
fs.AddFile('/x/src/my_module.html', """
<!DOCTYPE html>
<link rel="import" href="/py_vulcanize/foo.html">
""")
fs.AddFile('/x/py_vulcanize/foo.html', """
<!DOCTYPE html>
<link rel="import" href="missing.html">
""")
project = project_module.Project([os.path.normpath('/x')])
loader = resource_loader.ResourceLoader(project)
with fs:
exc = None
try:
loader.LoadModule(module_name='src.my_module')
assert False, 'Expected an exception'
except module.DepsException as e:
exc = e
self.assertEquals(
['src.my_module', 'py_vulcanize.foo'],
exc.context)
def testGetAllDependentFilenamesRecursive(self):
fs = fake_fs.FakeFS()
fs.AddFile('/x/y/z/foo.html', """
<!DOCTYPE html>
<link rel="import" href="/z/foo2.html">
<link rel="stylesheet" href="/z/foo.css">
<script src="/bar.js"></script>
""")
fs.AddFile('/x/y/z/foo.css', """
.x .y {
background-image: url(foo.jpeg);
}
""")
fs.AddFile('/x/y/z/foo.jpeg', '')
fs.AddFile('/x/y/z/foo2.html', """
<!DOCTYPE html>
""")
fs.AddFile('/x/raw/bar.js', 'hello')
project = project_module.Project([
os.path.normpath('/x/y'), os.path.normpath('/x/raw/')])
loader = resource_loader.ResourceLoader(project)
with fs:
my_module = loader.LoadModule(module_name='z.foo')
self.assertEquals(1, len(my_module.dependent_raw_scripts))
dependent_filenames = my_module.GetAllDependentFilenamesRecursive()
self.assertEquals(
[
os.path.normpath('/x/y/z/foo.html'),
os.path.normpath('/x/raw/bar.js'),
os.path.normpath('/x/y/z/foo.css'),
os.path.normpath('/x/y/z/foo.jpeg'),
os.path.normpath('/x/y/z/foo2.html'),
],
dependent_filenames)
| SummerLW/Perf-Insight-Report | third_party/py_vulcanize/py_vulcanize/module_unittest.py | Python | bsd-3-clause | 4,004 |
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing mdadm installation and cleanup functions."""
def _Install(vm):
"""Installs the mdadm package on the VM."""
vm.InstallPackages('mdadm')
def YumInstall(vm):
"""Installs the mdadm package on the VM."""
_Install(vm)
def AptInstall(vm):
"""Installs the mdadm package on the VM."""
_Install(vm)
def SwupdInstall(vm):
  """Installs the mdadm package (via the storage-utils bundle) on the VM."""
  vm.InstallPackages('storage-utils')
| GoogleCloudPlatform/PerfKitBenchmarker | perfkitbenchmarker/linux_packages/mdadm.py | Python | apache-2.0 | 1,002 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class SubnetsOperations(object):
"""SubnetsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified subnet.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'} # type: ignore
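    # Illustrative call pattern (a sketch, not part of the generated client;
    # assumes a configured NetworkManagementClient bound to the name `client`
    # and placeholder resource names):
    #
    #     poller = client.subnets.begin_delete('my-rg', 'my-vnet', 'my-subnet')
    #     poller.result()  # block until the long-running delete completes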
def get(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.Subnet"
"""Gets the specified subnet by virtual network and resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Subnet, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_07_01.models.Subnet
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Subnet"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('Subnet', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
subnet_parameters, # type: "_models.Subnet"
**kwargs # type: Any
):
# type: (...) -> "_models.Subnet"
cls = kwargs.pop('cls', None) # type: ClsType["_models.Subnet"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(subnet_parameters, 'Subnet')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('Subnet', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('Subnet', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
subnet_parameters, # type: "_models.Subnet"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.Subnet"]
"""Creates or updates a subnet in the specified virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:param subnet_parameters: Parameters supplied to the create or update subnet operation.
:type subnet_parameters: ~azure.mgmt.network.v2019_07_01.models.Subnet
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either Subnet or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_07_01.models.Subnet]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.Subnet"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
subnet_parameters=subnet_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('Subnet', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'} # type: ignore
def _prepare_network_policies_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
prepare_network_policies_request_parameters, # type: "_models.PrepareNetworkPoliciesRequest"
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._prepare_network_policies_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(prepare_network_policies_request_parameters, 'PrepareNetworkPoliciesRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_prepare_network_policies_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}/PrepareNetworkPolicies'} # type: ignore
def begin_prepare_network_policies(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
prepare_network_policies_request_parameters, # type: "_models.PrepareNetworkPoliciesRequest"
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Prepares a subnet by applying network intent policies.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:param prepare_network_policies_request_parameters: Parameters supplied to prepare subnet by
applying network intent policies.
:type prepare_network_policies_request_parameters: ~azure.mgmt.network.v2019_07_01.models.PrepareNetworkPoliciesRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._prepare_network_policies_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
prepare_network_policies_request_parameters=prepare_network_policies_request_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_prepare_network_policies.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}/PrepareNetworkPolicies'} # type: ignore
def _unprepare_network_policies_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
unprepare_network_policies_request_parameters, # type: "_models.UnprepareNetworkPoliciesRequest"
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._unprepare_network_policies_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(unprepare_network_policies_request_parameters, 'UnprepareNetworkPoliciesRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_unprepare_network_policies_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}/UnprepareNetworkPolicies'} # type: ignore
def begin_unprepare_network_policies(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
subnet_name, # type: str
unprepare_network_policies_request_parameters, # type: "_models.UnprepareNetworkPoliciesRequest"
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Unprepares a subnet by removing network intent policies.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:param unprepare_network_policies_request_parameters: Parameters supplied to unprepare subnet
to remove network intent policies.
:type unprepare_network_policies_request_parameters: ~azure.mgmt.network.v2019_07_01.models.UnprepareNetworkPoliciesRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._unprepare_network_policies_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
unprepare_network_policies_request_parameters=unprepare_network_policies_request_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_unprepare_network_policies.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}/UnprepareNetworkPolicies'} # type: ignore
def list(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.SubnetListResult"]
"""Gets all subnets in a virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SubnetListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_07_01.models.SubnetListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SubnetListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('SubnetListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets'} # type: ignore
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_07_01/operations/_subnets_operations.py | Python | mit | 35,774 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyPyparsing(PythonPackage):
"""A Python Parsing Module."""
homepage = "http://pyparsing.wikispaces.com/"
url = "https://pypi.io/packages/source/p/pyparsing/pyparsing-2.2.0.tar.gz"
import_modules = ['pyparsing']
version('2.2.0', '0214e42d63af850256962b6744c948d9')
version('2.1.10', '065908b92904e0d3634eb156f44cc80e')
version('2.0.3', '0fe479be09fc2cf005f753d3acc35939')
patch('setuptools-import.patch', when='@:2.1.10')
# Newer versions of setuptools require pyparsing. Although setuptools is an
# optional dependency of pyparsing, if it is not found, setup.py will
    # fall back on distutils.core instead. Don't add a setuptools dependency
# or we won't be able to bootstrap setuptools.
# depends_on('py-setuptools', type='build')
| EmreAtes/spack | var/spack/repos/builtin/packages/py-pyparsing/package.py | Python | lgpl-2.1 | 2,057 |
# Copyright 2017 BrainPad Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import, division, print_function, unicode_literals
import json
import logging
import os
from gensim.models import Word2Vec
import numpy as np
from scipy import spatial
from candysorter.ext.google.cloud import language
logger = logging.getLogger(__name__)
class TextAnalyzer(object):
_LANG_TO_NLAPI_LANG = {
'en': 'en-US',
'ja': 'ja',
}
def __init__(self, params_file, model_files, pos_weights):
self.params_file = params_file
self.model_files = model_files
self.pos_weights = pos_weights
self.labels = None
self.models = None
self.language_client = language.Client()
@classmethod
def from_config(cls, config):
return cls(params_file=os.path.join(config.CLASSIFIER_MODEL_DIR, 'params.json'),
model_files=config.WORD2VEC_MODEL_FILES,
pos_weights=config.POS_WEIGHTS)
def init(self):
self._load_models()
self._load_labels()
def reload(self):
self._load_labels()
def _load_models(self):
self.models = {}
for l, v in self.model_files.items():
logger.info('Loading %s word2vec model...', l)
self.models[l] = Word2Vec.load_word2vec_format(v['file'], binary=v['binary'])
            logger.info('Finished loading %s word2vec model.', l)
def _load_labels(self):
logger.info('Loading labels...')
with open(self.params_file) as f:
self.labels = json.load(f)['labels']
logger.info('Finished loading labels.')
def _to_nlapi_lang(self, lang):
return self._LANG_TO_NLAPI_LANG.get(lang, 'en-US')
def analyze_syntax(self, text, lang='en'):
document = self.language_client.document_from_text(
text, language=self._to_nlapi_lang(lang))
return document.analyze_syntax()
def calc_similarities(self, tokens, lang='en'):
t_v = self._tokens_vector(tokens, lang)
return np.array([
1. - spatial.distance.cosine(t_v, l_v)
for l_v in self._label_vectors(lang)
])
def _tokens_vector(self, tokens, lang):
model = self.models[lang]
_tokens = [(t.lemma.lower(), t.pos.tag) for t in tokens]
_tokens = [t for t in _tokens if t[0] in model]
# no words
if not _tokens:
return (np.random.rand(model.vector_size) - 0.5) / model.vector_size
# valid tokens
valids = [t for t in _tokens if self._pos_weight(t[1]) > 0.]
if valids:
return sum([model[w] * self._pos_weight(p) for w, p in valids]) / len(valids)
# all tokens
return sum([model[w] for w, _ in _tokens]) / len(_tokens)
def _label_vectors(self, lang):
model = self.models[lang]
vectors = []
for l in self.labels:
# e.g. 'SWEET CHOCOLATE' -> ['sweet', 'chocolate']
words = [w.lower() for w in l.split(' ')]
words = [w for w in words if w in model]
if not words:
v = (np.random.rand(model.vector_size) - 0.5) / model.vector_size
else:
v = sum([model[w] for w in words]) / len(words)
vectors.append(v)
return vectors
def _pos_weight(self, pos):
return self.pos_weights.get(pos, 0.)
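# Illustrative usage sketch (hypothetical; the config object and the input text
# are placeholders, and it assumes analyze_syntax() returns token objects that
# expose .lemma and .pos.tag, as expected by _tokens_vector above):
#
#     analyzer = TextAnalyzer.from_config(config)
#     analyzer.init()
#     tokens = analyzer.analyze_syntax('I like sweet chocolate', lang='en')
#     similarities = analyzer.calc_similarities(tokens, lang='en')
#     best_label = analyzer.labels[int(np.argmax(similarities))]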
class FakeTextAnalyzer(TextAnalyzer):
def init(self):
logger.info('*** %s loaded. ***', self.__class__.__name__)
self._load_labels()
def calc_similarities(self, words, lang='en'):
return np.linspace(0.9, 0.1, len(self.labels))
| BrainPad/FindYourCandy | webapp/candysorter/models/texts.py | Python | apache-2.0 | 4,296 |
#!/usr/bin/env python
"""
Draw a graph of the net architecture.
"""
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
from google.protobuf import text_format
import caffe
import caffe.draw
from caffe.proto import caffe_pb2
def parse_args():
"""Parse input arguments
"""
parser = ArgumentParser(description=__doc__,
formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('input_net_proto_file',
help='Input network prototxt file')
parser.add_argument('output_image_file',
help='Output image file')
parser.add_argument('--rankdir',
help=('One of TB (top-bottom, i.e., vertical), '
'RL (right-left, i.e., horizontal), or another '
'valid dot option; see '
'http://www.graphviz.org/doc/info/'
'attrs.html#k:rankdir'),
default='LR')
parser.add_argument('--phase',
help=('Which network phase to draw: can be TRAIN, '
'TEST, or ALL. If ALL, then all layers are drawn '
'regardless of phase.'),
default="ALL")
args = parser.parse_args()
return args
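# Example invocation (paths are placeholders):
#   python draw_net.py train_val.prototxt net.png --rankdir TB --phase TRAIN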
def main():
args = parse_args()
net = caffe_pb2.NetParameter()
text_format.Merge(open(args.input_net_proto_file).read(), net)
print('Drawing net to %s' % args.output_image_file)
    phase = None
if args.phase == "TRAIN":
phase = caffe.TRAIN
elif args.phase == "TEST":
phase = caffe.TEST
elif args.phase != "ALL":
raise ValueError("Unknown phase: " + args.phase)
caffe.draw.draw_net_to_file(net, args.output_image_file, args.rankdir,
phase)
if __name__ == '__main__':
main()
| aayushbansal/PixelNet | tools/caffe/python/draw_net.py | Python | mit | 1,934 |
"""
This recipe calculates statistics on values of a raster within the zones of a
vector dataset. It returns for each feature a dictionary item (FID) with the
statistical values in the following order:
Mean, Median, Standard Deviation
While this recipe works and is a good example, it is generally recommended to use
[rasterstats](https://github.com/perrygeo/python-raster-stats) for calculating
zonal statistics with Python.
"""
import gdal, ogr, osr, numpy
import sys
def zonal_grabs(feat, input_zone_polygon, input_value_raster):
# Open data
raster = gdal.Open(input_value_raster)
shp = ogr.Open(input_zone_polygon)
lyr = shp.GetLayer()
# Get raster georeference info
transform = raster.GetGeoTransform()
xOrigin = transform[0]
yOrigin = transform[3]
pixelWidth = transform[1]
pixelHeight = transform[5]
# Reproject vector geometry to same projection as raster
sourceSR = lyr.GetSpatialRef()
targetSR = osr.SpatialReference()
targetSR.ImportFromWkt(raster.GetProjectionRef())
coordTrans = osr.CoordinateTransformation(sourceSR,targetSR)
    # Use the feature passed in by the caller (the per-FID loop supplies it)
    geom = feat.GetGeometryRef()
geom.Transform(coordTrans)
# Get extent of feat
geom = feat.GetGeometryRef()
if (geom.GetGeometryName() == 'MULTIPOLYGON'):
count = 0
pointsX = []; pointsY = []
for polygon in geom:
geomInner = geom.GetGeometryRef(count)
ring = geomInner.GetGeometryRef(0)
numpoints = ring.GetPointCount()
for p in range(numpoints):
lon, lat, z = ring.GetPoint(p)
pointsX.append(lon)
pointsY.append(lat)
count += 1
elif (geom.GetGeometryName() == 'POLYGON'):
ring = geom.GetGeometryRef(0)
numpoints = ring.GetPointCount()
pointsX = []; pointsY = []
for p in range(numpoints):
lon, lat, z = ring.GetPoint(p)
pointsX.append(lon)
pointsY.append(lat)
else:
print "ERROR: Geometry needs to be either Polygon or Multipolygon"
sys.exit(998)
xmin = min(pointsX)
xmax = max(pointsX)
ymin = min(pointsY)
ymax = max(pointsY)
# Specify offset and rows and columns to read
xoff = int((xmin - xOrigin)/pixelWidth)
yoff = int((yOrigin - ymax)/pixelWidth)
xcount = int((xmax - xmin)/pixelWidth)+1
ycount = int((ymax - ymin)/pixelWidth)+1
# Create memory target raster
target_ds = gdal.GetDriverByName('MEM').Create('', xcount, ycount, 1, gdal.GDT_Byte)
target_ds.SetGeoTransform((
xmin, pixelWidth, 0,
ymax, 0, pixelHeight,
))
# Create for target raster the same projection as for the value raster
raster_srs = osr.SpatialReference()
raster_srs.ImportFromWkt(raster.GetProjectionRef())
target_ds.SetProjection(raster_srs.ExportToWkt())
# Rasterize zone polygon to raster
gdal.RasterizeLayer(target_ds, [1], lyr, burn_values=[1])
# Read raster as arrays
banddataraster = raster.GetRasterBand(1)
dataraster = banddataraster.ReadAsArray(xoff, yoff, xcount, ycount).astype(numpy.float)
bandmask = target_ds.GetRasterBand(1)
datamask = bandmask.ReadAsArray(0, 0, xcount, ycount).astype(numpy.float)
# Mask zone of raster
zoneraster = numpy.ma.masked_array(dataraster, numpy.logical_not(datamask))
##print " - ZR {}".format(zoneraster)
for row in zoneraster:
print "!",
for col in row:
print col,
print
print "!!"
# Calculate statistics of zonal raster
return numpy.mean(zoneraster),numpy.median(zoneraster),numpy.std(zoneraster)
def loop_zonal_grabs(input_zone_polygon, input_value_raster):
shp = ogr.Open(input_zone_polygon)
lyr = shp.GetLayer()
featList = range(lyr.GetFeatureCount())
statDict = {}
for FID in featList:
feat = lyr.GetFeature(FID)
meanValue = zonal_grabs(feat, input_zone_polygon, input_value_raster)
statDict[FID] = meanValue
print " - FID: {} VAL: {}".format(FID, meanValue)
return statDict
def main(input_zone_polygon, input_value_raster):
return loop_zonal_grabs(input_zone_polygon, input_value_raster)
if __name__ == "__main__":
#
# Returns for each feature a dictionary item (FID) with listed values inside the zone
#
    # example run : $ python grab_zonal_values.py <input-zone-shapefile>.shp <input-value-raster>.tif
#
if len( sys.argv ) != 3:
print "[ ERROR ] you must supply two arguments: input-zone-shapefile-name.shp input-value-raster-name.tif "
sys.exit(999)
    print 'Returns for each feature a dictionary item FID, with the statistical values in the following order: Mean, Median, Standard Deviation'
print main( sys.argv[1], sys.argv[2] )
| EC-software/EC_stuff | gdal_stuff/grab_zonal_values.py | Python | gpl-2.0 | 4,909 |
from typing import overload
class A:
@overload
def bar(self, value: str) -> None:
pass
@overload
def bar(self, value: int) -> str:
pass
def bar(self, value):
return None
A().bar("") | allotria/intellij-community | python/testData/refactoring/rename/overloadsAndImplementationInClassRenameCall_after.py | Python | apache-2.0 | 231 |
"""The climate tests for the venstar integration."""
from unittest.mock import patch
from homeassistant.components.climate.const import (
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_HUMIDITY,
SUPPORT_TARGET_TEMPERATURE,
)
from .util import async_init_integration, mock_venstar_devices
EXPECTED_BASE_SUPPORTED_FEATURES = (
SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE | SUPPORT_PRESET_MODE
)
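# The SUPPORT_* constants are single-bit flags, so ORing them together yields a
# bitmask; an individual capability can later be checked with a bitwise AND.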
@mock_venstar_devices
async def test_colortouch(hass):
"""Test interfacing with a venstar colortouch with attached humidifier."""
with patch("homeassistant.components.venstar.VENSTAR_SLEEP", new=0):
await async_init_integration(hass)
state = hass.states.get("climate.colortouch")
assert state.state == "heat"
expected_attributes = {
"hvac_modes": ["heat", "cool", "off", "auto"],
"min_temp": 7,
"max_temp": 35,
"min_humidity": 0,
"max_humidity": 60,
"fan_modes": ["on", "auto"],
"preset_modes": ["none", "away", "temperature"],
"current_temperature": 21.0,
"temperature": 20.5,
"current_humidity": 41,
"humidity": 30,
"fan_mode": "auto",
"hvac_action": "idle",
"preset_mode": "temperature",
"fan_state": 0,
"hvac_mode": 0,
"friendly_name": "COLORTOUCH",
"supported_features": EXPECTED_BASE_SUPPORTED_FEATURES
| SUPPORT_TARGET_HUMIDITY,
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(item in state.attributes.items() for item in expected_attributes.items())
@mock_venstar_devices
async def test_t2000(hass):
"""Test interfacing with a venstar T2000 presently turned off."""
with patch("homeassistant.components.venstar.VENSTAR_SLEEP", new=0):
await async_init_integration(hass)
state = hass.states.get("climate.t2000")
assert state.state == "off"
expected_attributes = {
"hvac_modes": ["heat", "cool", "off", "auto"],
"min_temp": 7,
"max_temp": 35,
"fan_modes": ["on", "auto"],
"preset_modes": ["none", "away", "temperature"],
"current_temperature": 14.0,
"temperature": None,
"fan_mode": "auto",
"hvac_action": "idle",
"preset_mode": "temperature",
"fan_state": 0,
"hvac_mode": 0,
"friendly_name": "T2000",
"supported_features": EXPECTED_BASE_SUPPORTED_FEATURES,
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(item in state.attributes.items() for item in expected_attributes.items())
| home-assistant/home-assistant | tests/components/venstar/test_climate.py | Python | apache-2.0 | 2,706 |
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility back to Python 2.5 and (currently) has significant performance
advantages, even without using the optional C extension for speedups.
Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print(json.dumps("\"foo\bar"))
"\"foo\bar"
>>> print(json.dumps(u'\u1234'))
"\u1234"
>>> print(json.dumps('\\'))
"\\"
>>> print(json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True))
{"a": 0, "b": 0, "c": 0}
>>> from simplejson.compat import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import simplejson as json
>>> obj = [1,2,3,{'4': 5, '6': 7}]
>>> json.dumps(obj, separators=(',',':'), sort_keys=True)
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
>>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' '))
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import simplejson as json
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
>>> from simplejson.compat import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import simplejson as json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> from decimal import Decimal
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
True
Specializing JSON object encoding::
>>> import simplejson as json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
    ...     raise TypeError(repr(obj) + " is not JSON serializable")
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using simplejson.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 3 (char 2)
"""
from __future__ import absolute_import
__version__ = '3.11.1'
__all__ = [
'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
'OrderedDict', 'simple_first',
]
__author__ = 'Bob Ippolito <bob@redivi.com>'
from decimal import Decimal
from .scanner import JSONDecodeError
from .decoder import JSONDecoder
from .encoder import JSONEncoder, JSONEncoderForHTML, RawJSON
def _import_OrderedDict():
import collections
try:
return collections.OrderedDict
except AttributeError:
from . import ordered_dict
return ordered_dict.OrderedDict
OrderedDict = _import_OrderedDict()
def _import_c_make_encoder():
try:
from ._speedups import make_encoder
return make_encoder
except ImportError:
return None
_default_encoder = JSONEncoder(
skipkeys=False,
ensure_ascii=True,
check_circular=True,
allow_nan=True,
indent=None,
separators=None,
encoding='utf-8',
default=None,
use_decimal=True,
namedtuple_as_object=True,
tuple_as_array=True,
iterable_as_array=False,
bigint_as_string=False,
item_sort_key=None,
for_json=False,
ignore_nan=False,
int_as_string_bitcount=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
encoding='utf-8', default=None, use_decimal=True,
namedtuple_as_object=True, tuple_as_array=True,
bigint_as_string=False, sort_keys=False, item_sort_key=None,
for_json=False, ignore_nan=False, int_as_string_bitcount=None,
iterable_as_array=False, **kw):
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
``.write()``-supporting file-like object).
If *skipkeys* is true then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
    If *ensure_ascii* is false, then some of the chunks written to ``fp``
may be ``unicode`` instances, subject to normal Python ``str`` to
``unicode`` coercion rules. Unless ``fp.write()`` explicitly
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
to cause an error.
If *check_circular* is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If *allow_nan* is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
in strict compliance of the original JSON specification, instead of using
the JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). See
*ignore_nan* for ECMA-262 compliant behavior.
If *indent* is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If specified, *separators* should be an
``(item_separator, key_separator)`` tuple. The default is ``(', ', ': ')``
if *indent* is ``None`` and ``(',', ': ')`` otherwise. To get the most
compact JSON representation, you should specify ``(',', ':')`` to eliminate
whitespace.
*encoding* is the character encoding for str instances, default is UTF-8.
*default(obj)* is a function that should return a serializable version
of obj or raise ``TypeError``. The default simply raises ``TypeError``.
If *use_decimal* is true (default: ``True``) then decimal.Decimal
will be natively serialized to JSON with full precision.
If *namedtuple_as_object* is true (default: ``True``),
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
as JSON objects.
If *tuple_as_array* is true (default: ``True``),
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
If *iterable_as_array* is true (default: ``False``),
any object not in the above table that implements ``__iter__()``
will be encoded as a JSON array.
If *bigint_as_string* is true (default: ``False``), ints 2**53 and higher
or lower than -2**53 will be encoded as strings. This is to avoid the
rounding that happens in Javascript otherwise. Note that this is still a
lossy operation that will not round-trip correctly and should be used
sparingly.
If *int_as_string_bitcount* is a positive number (n), then int of size
greater than or equal to 2**n or lower than or equal to -2**n will be
encoded as strings.
If specified, *item_sort_key* is a callable used to sort the items in
each dictionary. This is useful if you want to sort items other than
in alphabetical order by key. This option takes precedence over
*sort_keys*.
If *sort_keys* is true (default: ``False``), the output of dictionaries
will be sorted by item.
If *for_json* is true (default: ``False``), objects with a ``for_json()``
method will use the return value of that method for encoding as JSON
instead of the object.
If *ignore_nan* is true (default: ``False``), then out of range
:class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized as
``null`` in compliance with the ECMA-262 specification. If true, this will
override *allow_nan*.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg. NOTE: You should use *default* or *for_json* instead
of subclassing whenever possible.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
encoding == 'utf-8' and default is None and use_decimal
and namedtuple_as_object and tuple_as_array and not iterable_as_array
and not bigint_as_string and not sort_keys
and not item_sort_key and not for_json
and not ignore_nan and int_as_string_bitcount is None
and not kw
):
iterable = _default_encoder.iterencode(obj)
else:
if cls is None:
cls = JSONEncoder
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding,
default=default, use_decimal=use_decimal,
namedtuple_as_object=namedtuple_as_object,
tuple_as_array=tuple_as_array,
iterable_as_array=iterable_as_array,
bigint_as_string=bigint_as_string,
sort_keys=sort_keys,
item_sort_key=item_sort_key,
for_json=for_json,
ignore_nan=ignore_nan,
int_as_string_bitcount=int_as_string_bitcount,
**kw).iterencode(obj)
# could accelerate with writelines in some versions of Python, at
# a debuggability cost
for chunk in iterable:
fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
encoding='utf-8', default=None, use_decimal=True,
namedtuple_as_object=True, tuple_as_array=True,
bigint_as_string=False, sort_keys=False, item_sort_key=None,
for_json=False, ignore_nan=False, int_as_string_bitcount=None,
iterable_as_array=False, **kw):
"""Serialize ``obj`` to a JSON formatted ``str``.
If ``skipkeys`` is false then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If ``ensure_ascii`` is false, then the return value will be a
``unicode`` instance subject to normal Python ``str`` to ``unicode``
coercion rules instead of being escaped to an ASCII ``str``.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If ``indent`` is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If specified, ``separators`` should be an
``(item_separator, key_separator)`` tuple. The default is ``(', ', ': ')``
if *indent* is ``None`` and ``(',', ': ')`` otherwise. To get the most
compact JSON representation, you should specify ``(',', ':')`` to eliminate
whitespace.
``encoding`` is the character encoding for str instances, default is UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
If *use_decimal* is true (default: ``True``) then decimal.Decimal
will be natively serialized to JSON with full precision.
If *namedtuple_as_object* is true (default: ``True``),
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
as JSON objects.
If *tuple_as_array* is true (default: ``True``),
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
If *iterable_as_array* is true (default: ``False``),
any object not in the above table that implements ``__iter__()``
will be encoded as a JSON array.
If *bigint_as_string* is true (not the default), ints 2**53 and higher
or lower than -2**53 will be encoded as strings. This is to avoid the
rounding that happens in Javascript otherwise.
If *int_as_string_bitcount* is a positive number (n), then int of size
greater than or equal to 2**n or lower than or equal to -2**n will be
encoded as strings.
If specified, *item_sort_key* is a callable used to sort the items in
each dictionary. This is useful if you want to sort items other than
    in alphabetical order by key. This option takes precedence over
*sort_keys*.
If *sort_keys* is true (default: ``False``), the output of dictionaries
will be sorted by item.
If *for_json* is true (default: ``False``), objects with a ``for_json()``
method will use the return value of that method for encoding as JSON
instead of the object.
If *ignore_nan* is true (default: ``False``), then out of range
:class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized as
``null`` in compliance with the ECMA-262 specification. If true, this will
override *allow_nan*.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg. NOTE: You should use *default* instead of subclassing
whenever possible.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
encoding == 'utf-8' and default is None and use_decimal
and namedtuple_as_object and tuple_as_array and not iterable_as_array
and not bigint_as_string and not sort_keys
and not item_sort_key and not for_json
and not ignore_nan and int_as_string_bitcount is None
and not kw
):
return _default_encoder.encode(obj)
if cls is None:
cls = JSONEncoder
return cls(
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding, default=default,
use_decimal=use_decimal,
namedtuple_as_object=namedtuple_as_object,
tuple_as_array=tuple_as_array,
iterable_as_array=iterable_as_array,
bigint_as_string=bigint_as_string,
sort_keys=sort_keys,
item_sort_key=item_sort_key,
for_json=for_json,
ignore_nan=ignore_nan,
int_as_string_bitcount=int_as_string_bitcount,
**kw).encode(obj)
_default_decoder = JSONDecoder(encoding=None, object_hook=None,
object_pairs_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None,
use_decimal=False, namedtuple_as_object=True, tuple_as_array=True,
**kw):
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
a JSON document) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
If *use_decimal* is true (default: ``False``) then it implies
parse_float=decimal.Decimal for parity with ``dump``.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead
of subclassing whenever possible.
"""
return loads(fp.read(),
encoding=encoding, cls=cls, object_hook=object_hook,
parse_float=parse_float, parse_int=parse_int,
parse_constant=parse_constant, object_pairs_hook=object_pairs_hook,
use_decimal=use_decimal, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None,
use_decimal=False, **kw):
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
document) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
If *use_decimal* is true (default: ``False``) then it implies
parse_float=decimal.Decimal for parity with ``dump``.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead
of subclassing whenever possible.
"""
if (cls is None and encoding is None and object_hook is None and
parse_int is None and parse_float is None and
parse_constant is None and object_pairs_hook is None
and not use_decimal and not kw):
return _default_decoder.decode(s)
if cls is None:
cls = JSONDecoder
if object_hook is not None:
kw['object_hook'] = object_hook
if object_pairs_hook is not None:
kw['object_pairs_hook'] = object_pairs_hook
if parse_float is not None:
kw['parse_float'] = parse_float
if parse_int is not None:
kw['parse_int'] = parse_int
if parse_constant is not None:
kw['parse_constant'] = parse_constant
if use_decimal:
if parse_float is not None:
raise TypeError("use_decimal=True implies parse_float=Decimal")
kw['parse_float'] = Decimal
return cls(encoding=encoding, **kw).decode(s)
def _toggle_speedups(enabled):
from . import decoder as dec
from . import encoder as enc
from . import scanner as scan
c_make_encoder = _import_c_make_encoder()
if enabled:
dec.scanstring = dec.c_scanstring or dec.py_scanstring
enc.c_make_encoder = c_make_encoder
enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or
enc.py_encode_basestring_ascii)
scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner
else:
dec.scanstring = dec.py_scanstring
enc.c_make_encoder = None
enc.encode_basestring_ascii = enc.py_encode_basestring_ascii
scan.make_scanner = scan.py_make_scanner
dec.make_scanner = scan.make_scanner
global _default_decoder
_default_decoder = JSONDecoder(
encoding=None,
object_hook=None,
object_pairs_hook=None,
)
global _default_encoder
_default_encoder = JSONEncoder(
skipkeys=False,
ensure_ascii=True,
check_circular=True,
allow_nan=True,
indent=None,
separators=None,
encoding='utf-8',
default=None,
)
def simple_first(kv):
"""Helper function to pass to item_sort_key to sort simple
elements to the top, then container elements.
"""
return (isinstance(kv[1], (list, dict, tuple)), kv[0])
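# Illustrative usage (an assumption, not part of the original module): a quick
# round trip through dumps()/loads(), plus simple_first as an item_sort_key.
if __name__ == '__main__':
    _example = {'b': [1, 2], 'a': 1}
    _text = dumps(_example, sort_keys=True)
    assert loads(_text) == _example
    # simple_first orders scalar values ahead of containers
    print(dumps(_example, item_sort_key=simple_first))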
| NeostreamTechnology/Microservices | venv/lib/python2.7/site-packages/simplejson/__init__.py | Python | mit | 23,698 |
from distutils.core import setup
from Cython.Build import cythonize
setup(
setup_requires=[
'cython>=0.x',
],
ext_modules=cythonize(
["pytset.pyx", "tset.cpp"],
language="c++",
),
)
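# Typical build invocation (assumed workflow, not part of the original file):
#     python setup.py build_ext --inplace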
| jmnybl/setlib | setup.py | Python | gpl-2.0 | 223 |
# Copyright 2012-2013 Eric Ptak - trouch.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from myDevices.utils.types import toint
from myDevices.devices.i2c import I2C
from myDevices.devices.digital import GPIOPort
class PCF8574(I2C, GPIOPort):
FUNCTIONS = [GPIOPort.IN for i in range(8)]
def __init__(self, slave=0x20):
slave = toint(slave)
if slave in range(0x20, 0x28):
self.name = "PCF8574"
elif slave in range(0x38, 0x40):
self.name = "PCF8574A"
else:
raise ValueError("Bad slave address for PCF8574(A) : 0x%02X not in range [0x20..0x27, 0x38..0x3F]" % slave)
I2C.__init__(self, slave)
GPIOPort.__init__(self, 8)
self.portWrite(0xFF)
self.portRead()
def __str__(self):
return "%s(slave=0x%02X)" % (self.name, self.slave)
def __getFunction__(self, channel):
return self.FUNCTIONS[channel]
def __setFunction__(self, channel, value):
if not value in [self.IN, self.OUT]:
raise ValueError("Requested function not supported")
self.FUNCTIONS[channel] = value
def __digitalRead__(self, channel):
mask = 1 << channel
d = self.readByte()
return (d & mask) == mask
def __portRead__(self):
return self.readByte()
def __digitalWrite__(self, channel, value):
mask = 1 << channel
b = self.readByte()
if value:
b |= mask
else:
b &= ~mask
self.writeByte(b)
def __portWrite__(self, value):
self.writeByte(value)
class PCF8574A(PCF8574):
def __init__(self, slave=0x38):
PCF8574.__init__(self, slave)
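# Illustrative usage sketch (an assumption, not part of the original module):
# it requires a PCF8574 actually wired to the I2C bus at slave address 0x20,
# and only exercises the portWrite/portRead calls already used above.
if __name__ == "__main__":
    expander = PCF8574(slave=0x20)
    expander.portWrite(0x0F)    # drive P0-P3 high, P4-P7 low
    print(expander.portRead())  # read the whole port back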
| myDevicesIoT/Cayenne-Agent | myDevices/devices/digital/pcf8574.py | Python | mit | 2,274 |
import math
def max_sub_array(array, begin=None, end=None):
def max_sub_array_mid(arr, begin_m, end_m, middle):
l_sum, l_max_index, l_max_sum = 0, None, None
l_local = middle - 1
while l_local >= begin_m:
l_sum += arr[l_local]
if l_max_index is None:
l_max_index = l_local
l_max_sum = arr[l_local]
elif l_sum > l_max_sum:
l_max_index = l_local
l_max_sum = l_sum
l_local -= 1
r_sum, r_max_index, r_max_sum = 0, None, None
r_local = middle
while r_local < end_m:
r_sum += arr[r_local]
if r_max_index is None:
r_max_index = r_local + 1
r_max_sum = arr[r_local]
elif r_sum > r_max_sum:
r_max_index = r_local + 1
r_max_sum = r_sum
r_local += 1
if l_max_sum is None:
l_max_sum = 0
l_max_index = middle
if r_max_sum is None:
r_max_sum = 0
r_max_index = middle
ret = dict()
ret['sum'] = l_max_sum + r_max_sum
ret['begin'] = l_max_index
ret['end'] = r_max_index
return ret
if begin is None and end is None:
begin = 0
end = len(array)
res = dict()
if begin + 1 == end:
res['begin'] = begin
res['end'] = end
res['sum'] = array[begin]
return res
if begin == end:
res['begin'] = begin
res['end'] = end
res['sum'] = 0
return res
mid = math.floor((begin + end) / 2)
l = max_sub_array(array, begin, mid)
r = max_sub_array(array, mid, end)
m = max_sub_array_mid(array, begin, end, mid)
if l['sum'] >= r['sum'] and l['sum'] >= m['sum']:
return l
elif r['sum'] >= l['sum'] and r['sum'] >= m['sum']:
return r
else:
return m
if __name__ == '__main__':
test_list = [13, -3, -25, 20, -3, -16, -23, 18, 20, -7, 12, -5, -22, 15, -4, 7]
result = max_sub_array(test_list)
print('begin :', result['begin'], 'end:', result['end'], 'sum:', result['sum'])
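    # For this sample input the maximum subarray is test_list[7:11]
    # (18 + 20 - 7 + 12), so under Python 3 the expected output is:
    #   begin : 7 end: 11 sum: 43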
| ShengGuangzhi/SummerTree | algorithm/python_version/basic/max_sub_array.py | Python | mit | 2,257 |
#!/usr/bin/env python
#=========================================================================
#
# Copyright Insight Software Consortium
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#=========================================================================
from __future__ import print_function
import SimpleITK as sitk
import sys
import os
if len ( sys.argv ) != 4:
print( "Usage: %s inputImage sliceNumber outputImage" % ( sys.argv[0] ) )
sys.exit ( 1 )
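# Example invocation (hypothetical file names):
#     python ExtractSlice.py input_volume.nrrd 10 slice_10.mha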
zslice = int( sys.argv[2] )
inputImage = sitk.ReadImage( str(sys.argv[1]) )
size = list( inputImage.GetSize() )
size[2] = 0
index = [ 0, 0, zslice ]
Extractor = sitk.ExtractImageFilter()
Extractor.SetSize( size )
Extractor.SetIndex( index )
sitk.WriteImage( Extractor.Execute( inputImage ), str(sys.argv[3]) )
if ( not "SITK_NOSHOW" in os.environ ):
sitk.Show( Extractor.Execute( inputImage ) )
| prasadvagdargi/med_image_analysis | Examples/Python/ExtractSlice.py | Python | apache-2.0 | 1,397 |
#-*- coding:utf-8 -*-
import dircache, os, math
from PIL import Image
from psd_tools import PSDImage
from psd_tools import Group
import json
class Rect:
width = 0
height = 0
x = 0
y = 0
name = ""
class MergeImg:
sizes = [16, 32, 64, 128, 256, 512, 1024, 2048, 4096]
    div = 1  # spacing between packed images, in pixels
width = 0
height = 0
imgs = []
total_arena = 0
def __init__(self):
pass
def set_imgs(self, imgs):
self.imgs = imgs
def caculate_arena(self):
for img in self.imgs:
w, h = img.width, img.height
self.total_arena = self.total_arena + w * h
print "total_arena " + self.total_arena
def get_max_arena(self):
arena = 0
rst = None
for rect in self.imgs:
a = rect.width * rect.height
if a > arena:
arena = a
rst = rect
return rst
def get_max_width(self):
w = 0
rst = None
for rect in self.imgs:
a = rect.width
if a > w:
w = a
rst = rect
return rst
def get_max_height(self):
h = 0
rst = None
for rect in self.imgs:
a = rect.height
if a > h:
h = a
rst = rect
return rst
def merge(self):
w = math.sqrt(self.total_arena)
for i in self.sizes:
if i >= w:
w = i
break
class MergeTool:
    # directory containing the source images
    res_path = "E:/Temp/abc"
    # output directory for the generated atlas
    output_path = "E:/Temp"
cells = []
total_arena = 0
MAX_ARENA = 2048 * 2048
def __init__(self):
pass
def begin(self):
files = dircache.listdir(self.res_path)
for f in files:
p = self.res_path + '/' + f
img = Image.open(p)
self.cells.append(img)
img_width, img_height = img.size
self.total_arena = self.total_arena + img_width * img_height
print self.total_arena
if __name__ == "__main__":
d = MergeTool()
d.begin() | hookehu/utility | max_rects.py | Python | gpl-2.0 | 1,716 |
#-*- coding: utf-8 -*-
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('oauth2_authorization_server.account',
(r'^login/?$', 'login'),
(r'^logout/?$', 'logout'),
(r'^signup/?$', 'signup'),
(r'^clients/?$', 'clients'),
)
| RaduGatej/SensibleData-Platform | sensible_data_platform/oauth2_authorization_server/account_urls.py | Python | mit | 328 |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import frappe.utils
import json
from frappe.utils import cstr, flt, getdate, comma_and
from frappe import _
from frappe.model.mapper import get_mapped_doc
from erpnext.controllers.selling_controller import SellingController
form_grid_templates = {
"sales_order_details": "templates/form_grid/item_grid.html"
}
class SalesOrder(SellingController):
tname = 'Sales Order Item'
fname = 'sales_order_details'
person_tname = 'Target Detail'
partner_tname = 'Partner Target Detail'
territory_tname = 'Territory Target Detail'
def validate_mandatory(self):
# validate transaction date v/s delivery date
if self.delivery_date:
if getdate(self.transaction_date) > getdate(self.delivery_date):
frappe.throw(_("Expected Delivery Date cannot be before Sales Order Date"))
def validate_po(self):
# validate p.o date v/s delivery date
if self.po_date and self.delivery_date and getdate(self.po_date) > getdate(self.delivery_date):
frappe.throw(_("Expected Delivery Date cannot be before Purchase Order Date"))
if self.po_no and self.customer:
so = frappe.db.sql("select name from `tabSales Order` \
where ifnull(po_no, '') = %s and name != %s and docstatus < 2\
and customer = %s", (self.po_no, self.name, self.customer))
if so and so[0][0]:
frappe.msgprint(_("Warning: Sales Order {0} already exists against same Purchase Order number").format(so[0][0]))
def validate_for_items(self):
check_list, flag = [], 0
chk_dupl_itm = []
for d in self.get('sales_order_details'):
e = [d.item_code, d.description, d.warehouse, d.prevdoc_docname or '']
f = [d.item_code, d.description]
if frappe.db.get_value("Item", d.item_code, "is_stock_item") == 'Yes':
if not d.warehouse:
frappe.throw(_("Reserved warehouse required for stock item {0}").format(d.item_code))
if e in check_list:
frappe.throw(_("Item {0} has been entered twice").format(d.item_code))
else:
check_list.append(e)
else:
if f in chk_dupl_itm:
frappe.throw(_("Item {0} has been entered twice").format(d.item_code))
else:
chk_dupl_itm.append(f)
# used for production plan
d.transaction_date = self.transaction_date
tot_avail_qty = frappe.db.sql("select projected_qty from `tabBin` \
where item_code = %s and warehouse = %s", (d.item_code,d.warehouse))
d.projected_qty = tot_avail_qty and flt(tot_avail_qty[0][0]) or 0
def validate_sales_mntc_quotation(self):
for d in self.get('sales_order_details'):
if d.prevdoc_docname:
res = frappe.db.sql("select name from `tabQuotation` where name=%s and order_type = %s", (d.prevdoc_docname, self.order_type))
if not res:
frappe.msgprint(_("Quotation {0} not of type {1}").format(d.prevdoc_docname, self.order_type))
def validate_order_type(self):
super(SalesOrder, self).validate_order_type()
def validate_delivery_date(self):
# if self.order_type == 'Sales' and not self.delivery_date:
# frappe.throw(_("Please enter 'Expected Delivery Date'"))
self.validate_sales_mntc_quotation()
def validate_proj_cust(self):
if self.project_name and self.customer_name:
res = frappe.db.sql("""select name from `tabProject` where name = %s
and (customer = %s or ifnull(customer,'')='')""",
(self.project_name, self.customer))
if not res:
frappe.throw(_("Customer {0} does not belong to project {1}").format(self.customer, self.project_name))
def validate(self):
frappe.errprint("in the validate of sales order")
super(SalesOrder, self).validate()
self.validate_order_type()
self.validate_delivery_date()
self.validate_mandatory()
self.validate_proj_cust()
self.validate_po()
self.validate_uom_is_integer("stock_uom", "qty")
self.validate_for_items()
self.validate_warehouse()
from erpnext.stock.doctype.packed_item.packed_item import make_packing_list
make_packing_list(self,'sales_order_details')
self.validate_with_previous_doc()
if not self.status:
self.status = "Draft"
from erpnext.utilities import validate_status
validate_status(self.status, ["Draft", "Submitted", "Stopped",
"Cancelled"])
if not self.billing_status: self.billing_status = 'Not Billed'
if not self.delivery_status: self.delivery_status = 'Not Delivered'
def validate_warehouse(self):
from erpnext.stock.utils import validate_warehouse_company
warehouses = list(set([d.warehouse for d in
self.get(self.fname) if d.warehouse]))
for w in warehouses:
validate_warehouse_company(w, self.company)
def validate_with_previous_doc(self):
super(SalesOrder, self).validate_with_previous_doc(self.tname, {
"Quotation": {
"ref_dn_field": "prevdoc_docname",
"compare_fields": [["company", "="], ["currency", "="]]
}
})
def update_enquiry_status(self, prevdoc, flag):
enq = frappe.db.sql("select t2.prevdoc_docname from `tabQuotation` t1, `tabQuotation Item` t2 where t2.parent = t1.name and t1.name=%s", prevdoc)
if enq:
frappe.db.sql("update `tabOpportunity` set status = %s where name=%s",(flag,enq[0][0]))
def update_prevdoc_status(self, flag):
for quotation in list(set([d.prevdoc_docname for d in self.get(self.fname)])):
if quotation:
doc = frappe.get_doc("Quotation", quotation)
if doc.docstatus==2:
frappe.throw(_("Quotation {0} is cancelled").format(quotation))
doc.set_status(update=True)
def on_submit(self):
self.update_stock_ledger(update_stock = 1)
self.check_credit(self.grand_total)
frappe.get_doc('Authorization Control').validate_approving_authority(self.doctype, self.grand_total, self)
self.update_prevdoc_status('submit')
frappe.db.set(self, 'status', 'Submitted')
# """for email send"""
# self.send_email()
def send_email(self):
from frappe.utils.user import get_user_fullname
# # from frappe.utils import get_url
# # mail_titles = frappe.get_hooks().get("login_mail_title", [])
# title = frappe.db.get_default('company') or (mail_titles and mail_titles[0]) or ""
# full_name = get_user_fullname(frappe.session['user'])
# if full_name == "Guest":
# full_name = "Administrator"
email=frappe.db.sql("""select email_id from `tabAddress` where customer='%s'"""%(self.customer),as_list=1)
frappe.errprint(email[0][0])
message = frappe.db.sql_list("""select message from `tabTemplate Types`
where event_type='Sales Order Submit'""")
frappe.errprint(message[0])
# frappe.errprint(message[0].format(self.first_name or self.last_name or "user",link,self.name,full_name))
sender = frappe.session.user not in STANDARD_USERS and frappe.session.user or None
frappe.sendmail(recipients=email[0][0], sender=sender, subject='Get your business online with TailorPad',
message=message[0].format(self.customer,self.name,self.transaction_date,self.grand_total))
# frappe.throw(_("""Approval Status must be 'Approved' or 'Rejected'"""))
def on_cancel(self):
# Cannot cancel stopped SO
if self.status == 'Stopped':
frappe.throw(_("Stopped order cannot be cancelled. Unstop to cancel."))
self.check_nextdoc_docstatus()
self.update_stock_ledger(update_stock = -1)
self.update_prevdoc_status('cancel')
frappe.db.set(self, 'status', 'Cancelled')
def check_nextdoc_docstatus(self):
# Checks Delivery Note
submit_dn = frappe.db.sql_list("""select t1.name from `tabDelivery Note` t1,`tabDelivery Note Item` t2
where t1.name = t2.parent and t2.against_sales_order = %s and t1.docstatus = 1""", self.name)
if submit_dn:
frappe.throw(_("Delivery Notes {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_dn)))
# Checks Sales Invoice
submit_rv = frappe.db.sql_list("""select t1.name
from `tabSales Invoice` t1,`tabSales Invoice Item` t2
where t1.name = t2.parent and t2.sales_order = %s and t1.docstatus = 1""",
self.name)
if submit_rv:
frappe.throw(_("Sales Invoice {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_rv)))
#check maintenance schedule
submit_ms = frappe.db.sql_list("""select t1.name from `tabMaintenance Schedule` t1,
`tabMaintenance Schedule Item` t2
where t2.parent=t1.name and t2.prevdoc_docname = %s and t1.docstatus = 1""", self.name)
if submit_ms:
frappe.throw(_("Maintenance Schedule {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_ms)))
# check maintenance visit
submit_mv = frappe.db.sql_list("""select t1.name from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2
where t2.parent=t1.name and t2.prevdoc_docname = %s and t1.docstatus = 1""",self.name)
if submit_mv:
frappe.throw(_("Maintenance Visit {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_mv)))
# check production order
pro_order = frappe.db.sql_list("""select name from `tabProduction Order`
where sales_order = %s and docstatus = 1""", self.name)
if pro_order:
frappe.throw(_("Production Order {0} must be cancelled before cancelling this Sales Order").format(comma_and(pro_order)))
def check_modified_date(self):
mod_db = frappe.db.get_value("Sales Order", self.name, "modified")
date_diff = frappe.db.sql("select TIMEDIFF('%s', '%s')" %
( mod_db, cstr(self.modified)))
if date_diff and date_diff[0][0]:
frappe.throw(_("{0} {1} has been modified. Please refresh.").format(self.doctype, self.name))
def stop_sales_order(self):
self.check_modified_date()
self.update_stock_ledger(-1)
frappe.db.set(self, 'status', 'Stopped')
frappe.msgprint(_("{0} {1} status is Stopped").format(self.doctype, self.name))
def unstop_sales_order(self):
self.check_modified_date()
self.update_stock_ledger(1)
frappe.db.set(self, 'status', 'Submitted')
frappe.msgprint(_("{0} {1} status is Unstopped").format(self.doctype, self.name))
def update_stock_ledger(self, update_stock):
from erpnext.stock.utils import update_bin
for d in self.get_item_list():
if frappe.db.get_value("Item", d['item_code'], "is_stock_item") == "Yes":
args = {
"item_code": d['item_code'],
"warehouse": d['reserved_warehouse'],
"reserved_qty": flt(update_stock) * flt(d['reserved_qty']),
"posting_date": self.transaction_date,
"voucher_type": self.doctype,
"voucher_no": self.name,
"is_amended": self.amended_from and 'Yes' or 'No'
}
update_bin(args)
def on_update(self):
frappe.errprint("calling superadmin")
from frappe.utils import get_url, cstr
frappe.errprint(get_url())
if get_url()=='http://smarttailor':
self.superadmin()
def superadmin(self):
import requests
import json
pr = frappe.db.sql_list("""select item_code from `tabSales Order Item` where parent = %s limit 1""", self.name)
#frappe.errprint(pr[0])
qr="select no_of_users from `tabItem` where name = '"+pr[0]+"'"
#frappe.errprint(qr)
pro = frappe.db.sql_list(qr)
qr1="select validity from `tabItem` where name = '"+pr[0]+"'"
pro1 = frappe.db.sql_list(qr1)
#frappe.errprint(pro[0])
#frappe.errprint(pro[0])
headers = {'content-type': 'application/x-www-form-urlencoded'}
sup={'usr':'administrator','pwd':'admin'}
url = 'http://'+self.customer+'/api/method/login'
response = requests.get(url, data=sup, headers=headers)
#frappe.errprint(response.text)
#frappe.errprint(json.dumps(sup))
#url='http://'+self.customer+'/api/resource/User/?fields=["name", "validity","no_of_users"]'
#response = requests.get(url)
#frappe.errprint(response.text)
support_ticket={}
support_ticket['validity']=pro1[0]
support_ticket['no_of_users']=pro[0]
url = 'http://'+self.customer+'/api/resource/User/Administrator'
#frappe.errprint('data='+json.dumps(support_ticket))
response = requests.put(url, data='data='+json.dumps(support_ticket), headers=headers)
#frappe.errprint(response)
#frappe.errprint(response.text)
		if pro1 and flt(pro1[0]) > 0:
frappe.db.sql("update `tabSite Master`set expiry_date=DATE_ADD(CURDATE(), INTERVAL "+cstr(pro1[0])+" MONTH) where name='"+self.customer+"'")
def get_portal_page(self):
return "order" if self.docstatus==1 else None
# def on_submit(self):
# """send mail with sales details"""
# from frappe.utils.user import get_user_fullname
# # from frappe.utils import get_url
# # mail_titles = frappe.get_hooks().get("login_mail_title", [])
# title = frappe.db.get_default('company') or (mail_titles and mail_titles[0]) or ""
# full_name = get_user_fullname(frappe.session['user'])
# if full_name == "Guest":
# full_name = "Administrator"
# message = frappe.db.sql_list("""select message from `tabTemplate Types`
# where event_type='Sales Order Submit'""")
# frappe.errprint(message[0])
# # frappe.errprint(message[0].format(self.first_name or self.last_name or "user",link,self.name,full_name))
# sender = frappe.session.user not in STANDARD_USERS and frappe.session.user or None
# frappe.sendmail(recipients=self.email, sender=sender, subject=subject,
# message=message[0].format(self.first_name or self.last_name or "user",link,self.name))
# frappe.throw(_("""Approval Status must be 'Approved' or 'Rejected'"""))
@frappe.whitelist()
def make_material_request(source_name, target_doc=None):
def postprocess(source, doc):
doc.material_request_type = "Purchase"
doc = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Material Request",
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Material Request Item",
"field_map": {
"parent": "sales_order_no",
"stock_uom": "uom"
}
}
}, target_doc, postprocess)
return doc
@frappe.whitelist()
def make_delivery_note(source_name, target_doc=None):
def set_missing_values(source, target):
target.ignore_pricing_rule = 1
target.run_method("set_missing_values")
target.run_method("calculate_taxes_and_totals")
def update_item(source, target, source_parent):
target.base_amount = (flt(source.qty) - flt(source.delivered_qty)) * flt(source.base_rate)
target.amount = (flt(source.qty) - flt(source.delivered_qty)) * flt(source.rate)
target.qty = flt(source.qty) - flt(source.delivered_qty)
target_doc = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Delivery Note",
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Delivery Note Item",
"field_map": {
"rate": "rate",
"name": "prevdoc_detail_docname",
"parent": "against_sales_order",
},
"postprocess": update_item,
"condition": lambda doc: doc.delivered_qty < doc.qty
},
"Sales Taxes and Charges": {
"doctype": "Sales Taxes and Charges",
"add_if_empty": True
},
"Sales Team": {
"doctype": "Sales Team",
"add_if_empty": True
}
}, target_doc, set_missing_values)
return target_doc
@frappe.whitelist()
def make_sales_invoice(source_name, target_doc=None):
def set_missing_values(source, target):
target.is_pos = 0
target.ignore_pricing_rule = 1
target.run_method("set_missing_values")
target.run_method("calculate_taxes_and_totals")
def update_item(source, target, source_parent):
target.amount = flt(source.amount) - flt(source.billed_amt)
target.base_amount = target.amount * flt(source_parent.conversion_rate)
target.qty = source.rate and target.amount / flt(source.rate) or source.qty
doclist = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Sales Invoice",
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Sales Invoice Item",
"field_map": {
"name": "so_detail",
"parent": "sales_order",
},
"postprocess": update_item,
"condition": lambda doc: doc.base_amount==0 or doc.billed_amt < doc.amount
},
"Sales Taxes and Charges": {
"doctype": "Sales Taxes and Charges",
"add_if_empty": True
},
"Sales Team": {
"doctype": "Sales Team",
"add_if_empty": True
}
}, target_doc, set_missing_values)
return doclist
@frappe.whitelist()
def make_maintenance_schedule(source_name, target_doc=None):
maint_schedule = frappe.db.sql("""select t1.name
from `tabMaintenance Schedule` t1, `tabMaintenance Schedule Item` t2
where t2.parent=t1.name and t2.prevdoc_docname=%s and t1.docstatus=1""", source_name)
if not maint_schedule:
doclist = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Maintenance Schedule",
"field_map": {
"name": "sales_order_no"
},
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Maintenance Schedule Item",
"field_map": {
"parent": "prevdoc_docname"
},
"add_if_empty": True
}
}, target_doc)
return doclist
@frappe.whitelist()
def make_maintenance_visit(source_name, target_doc=None):
visit = frappe.db.sql("""select t1.name
from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2
where t2.parent=t1.name and t2.prevdoc_docname=%s
and t1.docstatus=1 and t1.completion_status='Fully Completed'""", source_name)
if not visit:
doclist = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Maintenance Visit",
"field_map": {
"name": "sales_order_no"
},
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Maintenance Visit Purpose",
"field_map": {
"parent": "prevdoc_docname",
"parenttype": "prevdoc_doctype"
},
"add_if_empty": True
}
}, target_doc)
return doclist
| gangadharkadam/smrterp | erpnext/selling/doctype/sales_order/sales_order.py | Python | agpl-3.0 | 17,677 |
from manager import OneServerManager
import sys
try:
from ctypes import cdll
from ctypes import c_int,c_void_p,c_long
from ctypes import POINTER,Structure
from ctypes.util import find_library
except ImportError:
OneServerManager().log.error('Library CTypes not found.')
sys.exit()
##
# Free list node. Points to next free item. Memory for node
# is borrowed from allocated items.
class FREELISTNODE(Structure):
pass
FREELISTNODE._fields_ = [('next', POINTER(FREELISTNODE))]
class FreeListNode(FREELISTNODE):
pass
##
# Stores head and size of free list, as well as mutex for protection.
class FREELIST(Structure):
pass
FREELIST._fields_ = [
('head', POINTER(FreeListNode)),
('element_size', c_long),
('maxFreeListLength', c_int),
('freeListLength', c_int)
]
class FreeList(FREELIST):
pass
FreeListLib = cdll.LoadLibrary(find_library('upnp'))
FreeListLib.FreeListInit.restype = c_int
FreeListLib.FreeListInit.argtypes = [POINTER(FreeList), c_long, c_int]
FreeListLib.FreeListAlloc.restype = c_void_p
FreeListLib.FreeListAlloc.argtypes = [POINTER(FreeList)]
FreeListLib.FreeListFree.restype = c_int
FreeListLib.FreeListFree.argtypes = [POINTER(FreeList), c_void_p]
FreeListLib.FreeListDestroy.restype = c_int
FreeListLib.FreeListDestroy.argtypes = [POINTER(FreeList)]
##
# Initializes Free List. Must be called first. And only once for
# FreeList.
#
# @param free_list must be valid, non null, pointer to a linked list.
# @param elementSize size of elements to store in free list.
# @param maxFreeListSize max size that the free list can grow
# to before returning memory to the O.S.
#
# @return 0 on success. Nonzero on failure. Always returns 0.
def FreeListInit(free_list, elementSize, maxFreeListSize):
return FreeListLib.FreeListInit(free_list, elementSize, maxFreeListSize)
##
# Allocates chunk of set size. If a free item is available in the list, returns the stored item.
# Otherwise calls the O.S. to allocate memory.
#
# @param free_list must be valid, non null, pointer to a linked list.
#
# @return Non None on success. None of failure.
def FreeListAlloc(free_list):
return FreeListLib.FreeListAlloc(free_list)
##
# Returns an item to the Free List. If the free list is smaller than the max size then
# adds the item to the free list. Otherwise returns the item to the O.S.
#
# @param free_list must be valid, non None, pointer to a linked list.
#
# @return 0 on success. Nonzero on failure. Always returns 0.
def FreeListFree(free_list, element):
return FreeListLib.FreeListFree(free_list, element)
##
# Releases the resources stored with the free list.
#
# @param free_list must be valid, non None, pointer to a linked list.
#
# @returns 0 on success, nonzero on failure
def FreeListDestroy(free_list):
return FreeListLib.FreeListDestroy(free_list)
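# Illustrative usage sketch (an assumption -- it only works if the loaded
# libupnp build actually exports the FreeList symbols wrapped above).
if __name__ == "__main__":
	fl = FreeList()
	FreeListInit(fl, 64, 10)   # 64-byte elements, keep at most 10 on the free list
	item = FreeListAlloc(fl)   # borrow one element
	FreeListFree(fl, item)     # hand it back to the free list
	FreeListDestroy(fl)        # release everything the list still holds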
| 1Server/OneServer | oneserver/wrappers/upnp/FreeList.py | Python | mit | 2,790 |
# -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. #
# http://pygithub.github.io/PyGithub/v1/index.html #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import Framework
class Issue131(Framework.TestCase): # https://github.com/jacquev6/PyGithub/pull/133
def setUp(self):
Framework.TestCase.setUp(self)
self.user = self.g.get_user()
self.repo = self.g.get_user("openmicroscopy").get_repo("ome-documentation")
def testGetPullWithOrgHeadUser(self):
user = self.repo.get_pull(204).head.user
self.assertEqual(user.login, 'imcf')
self.assertEqual(user.type, 'Organization')
self.assertEqual(user.__class__.__name__, 'NamedUser') # Should be Organization
def testGetPullsWithOrgHeadUser(self):
for pull in self.repo.get_pulls('closed'):
if pull.number == 204:
user = pull.head.user
self.assertEqual(user, None)
# Should be:
# self.assertEqual(user.login, 'imcf')
# self.assertEqual(user.type, 'Organization')
# self.assertEqual(user.__class__.__name__, 'NamedUser') # Should be Organization
break
else:
self.assertTrue(False)
| FireBladeNooT/Medusa_1_6 | lib/github/tests/Issue131.py | Python | gpl-3.0 | 2,820 |
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import NoArgsCommand
from docutil.commands_util import recocommand
from docutil.str_util import smart_decode
from recommender.actions import find_high_level_links_msg
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--pname', action='store', dest='pname',
default='-1', help='Project unix name'),
make_option('--bname', action='store', dest='bname',
default='-1', help='Code Base name'),
make_option('--release', action='store', dest='release',
default='-1', help='Project Release'),
make_option('--src-pk', action='store', dest='src_pk',
default='d', help='PK of the source'),
make_option('--dst-pk', action='store', dest='dst_pk',
default='-1', help='PK of the destination'),
make_option('--msg-level', action='store_true', dest='msg_level',
default=False, help='at message level (otherwise: thread)'),
make_option('--no-snippet', action='store_true', dest='no_snippet',
default=False, help='only inlined reference'),
make_option('--size', action='store', dest='size',
default='-1', help='minimum number of common elements'),
)
help = "Find high level links"
@recocommand
def handle_noargs(self, **options):
pname = smart_decode(options.get('pname'))
bname = smart_decode(options.get('bname'))
release = smart_decode(options.get('release'))
src_pk = int(smart_decode(options.get('src_pk')))
dst_pk = int(smart_decode(options.get('dst_pk')))
msg_level = options.get('msg_level', False)
no_snippet = options.get('no_snippet', False)
size = int(smart_decode(options.get('size')))
find_high_level_links_msg(pname, bname, release, src_pk, dst_pk,
msg_level, no_snippet, size)
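# Example invocation (hypothetical project, code base and pk values):
#     python manage.py findhighlevel --pname myproject --bname core \
#         --release 1.0 --src-pk 12 --dst-pk 34 --msg-level --size 2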
| bartdag/recodoc2 | recodoc2/apps/recommender/management/commands/findhighlevel.py | Python | bsd-3-clause | 1,989 |
def recurrence_memo(initial):
"""
Memo decorator for sequences defined by recurrence
See usage examples e.g. in the specfun/combinatorial module
"""
cache = initial
def decorator(f):
def g(n):
L = len(cache)
if n <= L - 1:
return cache[n]
for i in xrange(L, n+1):
cache.append(f(i, cache))
return cache[-1]
return g
return decorator
def assoc_recurrence_memo(base_seq):
"""
Memo decorator for associated sequences defined by recurrence starting from base
base_seq(n) -- callable to get base sequence elements
XXX works only for Pn0 = base_seq(0) cases
XXX works only for m <= n cases
"""
cache = []
def decorator(f):
def g(n,m):
L = len(cache)
if n < L:
return cache[n][m]
for i in xrange(L,n+1):
# get base sequence
F_i0 = base_seq(i)
F_i_cache = [F_i0]
cache.append(F_i_cache)
# XXX only works for m <= n cases
# generate assoc sequence
for j in xrange(1,i+1):
F_ij = f(i,j, cache)
F_i_cache.append(F_ij)
return cache[n][m]
return g
return decorator
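# Illustrative usage (an assumption, mirroring how the combinatorial module
# applies the decorator): a Fibonacci sequence defined by its recurrence and
# seeded with its first two values.
if __name__ == '__main__':
    @recurrence_memo([0, 1])
    def fib(n, prev):
        return prev[-1] + prev[-2]

    print fib(10)  # -> 55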
| hazelnusse/sympy-old | sympy/utilities/memoization.py | Python | bsd-3-clause | 1,358 |
from common_fixtures import * # NOQA
@pytest.fixture(scope='module')
def user_client(admin_user_client):
return create_context(admin_user_client, create_project=False,
add_host=False, kind='user').user_client
@pytest.fixture(scope='module')
def read_admin_client(admin_user_client):
return create_context(admin_user_client, create_project=False,
add_host=False, kind='readAdmin').user_client
@pytest.fixture(scope='module')
def project_client(admin_user_client):
return create_context(admin_user_client, create_project=False,
add_host=False, kind='project').user_client
@pytest.fixture(scope='module')
def token_client(admin_user_client):
return create_context(admin_user_client, create_project=False,
add_host=False, kind='token').user_client
@pytest.fixture(scope='module')
def agent_client(admin_user_client):
return create_context(admin_user_client, create_project=False,
add_host=False, kind='agent').user_client
@pytest.fixture(scope='module')
def agent_register_client(admin_user_client):
return create_context(admin_user_client, create_project=False,
add_host=False, kind='agentRegister').user_client
@pytest.fixture(scope='module')
def service_client(admin_user_client):
return create_context(admin_user_client, create_project=False,
add_host=False, kind='service').user_client
@pytest.fixture(scope='module')
def project_admin_client(admin_user_client):
return create_context(admin_user_client, create_project=False,
add_host=False, kind='projectadmin').user_client
def _clean_types(types):
for i in ['openstackConfig',
'ubiquityConfig',
'packetConfig',
'notThere',
'azureConfig',
'vmwarevcloudairConfig',
'exoscaleConfig',
'rackspaceConfig',
'hypervConfig',
'googleConfig',
'vmwarevsphereConfig',
'virtualboxConfig',
'amazonec2Config',
'genericConfig',
'vmwarefusionConfig',
'digitaloceanConfig',
'softlayerConfig',
'noneConfig']:
try:
types.remove(i)
except ValueError:
pass
except KeyError:
pass
return types
def test_user_types(user_client, adds=set(), removes=set()):
types = {
'account',
'addOutputsInput',
'addRemoveServiceLinkInput',
'apiKey',
'auditLog',
'backup',
'backupTarget',
'baseMachineConfig',
'certificate',
'changeSecretInput',
'composeService',
'composeProject',
'composeConfig',
'composeConfigInput',
'container',
'containerEvent',
'containerExec',
'containerLogs',
'containerProxy',
'credential',
'dnsService',
'environment',
'environmentUpgrade',
'externalService',
'externalEvent',
'externalServiceEvent',
'externalStoragePoolEvent',
'externalVolumeEvent',
'externalDnsEvent',
'externalHostEvent',
'fieldDocumentation',
'host',
'hostAccess',
'identity',
'image',
'instance',
'instanceConsole',
'instanceConsoleInput',
'instanceHealthCheck',
'instanceLink',
'instanceStop',
'ipAddress',
'ipAddressAssociateInput',
'kubernetesService',
'kubernetesStack',
'kubernetesStackUpgrade',
'label',
'loadBalancerAppCookieStickinessPolicy',
'loadBalancerConfig',
'loadBalancerCookieStickinessPolicy',
'loadBalancerService',
'logConfig',
'machine',
'machineDriver',
'mount',
'network',
'password',
'physicalHost',
'port',
'project',
'projectMember',
'pullTask',
'register',
'registrationToken',
'registry',
'registryCredential',
'restartPolicy',
'restoreFromBackupInput',
'revertToSnapshotInput',
'schema',
'service',
'serviceExposeMap',
'serviceProxy',
'setLabelsInput',
'setLabelsInput',
'setProjectMembersInput',
'setServiceLinksInput',
'snapshot',
'snapshotBackupInput',
'statsAccess',
'storagePool',
'typeDocumentation',
'userPreference',
'volume',
'launchConfig',
'serviceEvent',
'activeSetting',
'serviceConsumeMap',
'setting',
'dockerBuild',
'secondaryLaunchConfig',
'serviceLink',
'loadBalancerServiceLink',
'addRemoveLoadBalancerServiceLinkInput',
'setLoadBalancerServiceLinksInput',
'serviceUpgrade',
'serviceUpgradeStrategy',
'toServiceUpgradeStrategy',
'inServiceUpgradeStrategy',
'virtualMachine',
'virtualMachineDisk',
'publicEndpoint',
'haproxyConfig',
'serviceRestart',
'rollingRestartStrategy',
'servicesPortRange',
'healthcheckInstanceHostMap',
'recreateOnQuorumStrategyConfig',
'volumeSnapshotInput',
'nfsConfig',
'blkioDeviceOption',
'scalePolicy',
}
types.update(adds)
types.difference_update(removes)
assert set(_clean_types(user_client.schema.types.keys())) == types
return types
def test_project_types(project_client):
# Almost the same as user
test_user_types(project_client, adds={'subscribe'},
removes={'userPreference'})
def test_readonly_types(admin_user_client):
context = create_context(admin_user_client, kind='readonly')
client = context.user_client
test_user_types(client, adds={'subscribe'},
removes={'userPreference', 'registrationToken'})
for type in _clean_types(set(client.schema.types.keys())):
type = client.schema.types[type]
assert len(type['actions']) == 0
if type.id == 'container':
assert type['resourceActions'].keys() == ['logs']
elif type.id == 'virtualMachine':
assert type['resourceActions'].keys() == ['logs']
else:
print type.id
assert len(type['resourceActions']) == 0
assert len(type['collectionActions']) == 0
if type.resourceFields is not None:
for k, field in type.resourceFields.items():
assert field.create is False
assert field.update is False
def test_agent_register_types(agent_register_client):
assert set(_clean_types(agent_register_client.schema.types.keys())) == {
'agent',
'error',
'schema',
}
def test_agent_types(agent_client):
assert set(_clean_types(agent_client.schema.types.keys())) == {
'agent',
'configContent',
'containerEvent',
'error',
'externalEvent',
'externalVolumeEvent',
'externalServiceEvent',
'externalStoragePoolEvent',
'externalDnsEvent',
'hostApiProxyToken',
'publish',
'schema',
'subscribe',
'serviceEvent',
'storagePool',
'volume',
}
def test_token_types(token_client):
assert set(token_client.schema.types.keys()) == {
'schema',
'token',
}
def test_service_types(service_client):
# Almost the same as admin user
test_admin_types(service_client, adds={'subscribe', 'dynamicSchema'},
removes={'userPreference'})
def test_read_admin_types(read_admin_client):
# same as admin user
test_admin_types(read_admin_client)
def test_admin_types(admin_user_client, adds=set(), removes=set()):
types = {
'account',
'activeSetting',
'addOutputsInput',
'addRemoveServiceLinkInput',
'agent',
'apiKey',
'auditLog',
'backup',
'backupTarget',
'baseMachineConfig',
'certificate',
'changeSecretInput',
'composeConfig',
'composeConfigInput',
'composeProject',
'composeService',
'configItem',
'configItemStatus',
'container',
'containerEvent',
'containerExec',
'containerLogs',
'containerProxy',
'credential',
'databasechangelog',
'databasechangeloglock',
'dnsService',
'environment',
'environmentUpgrade',
'extensionImplementation',
'extensionPoint',
'externalHandler',
'externalHandlerExternalHandlerProcessMap',
'externalHandlerProcess',
'externalHandlerProcessConfig',
'externalService',
'externalEvent',
'externalVolumeEvent',
'externalServiceEvent',
'externalStoragePoolEvent',
'externalDnsEvent',
'externalHostEvent',
'fieldDocumentation',
'githubconfig',
'azureadconfig',
'haConfig',
'haConfigInput',
'host',
'hostAccess',
'hostApiProxyToken',
'identity',
'image',
'instance',
'instanceConsole',
'instanceConsoleInput',
'instanceHealthCheck',
'instanceLink',
'instanceStop',
'ipAddress',
'ipAddressAssociateInput',
'kubernetesService',
'kubernetesStack',
'kubernetesStackUpgrade',
'label',
'ldapconfig',
'loadBalancerAppCookieStickinessPolicy',
'loadBalancerConfig',
'loadBalancerCookieStickinessPolicy',
'loadBalancerService',
'localAuthConfig',
'logConfig',
'machine',
'machineDriver',
'mount',
'network',
'openldapconfig',
'password',
'physicalHost',
'port',
'processDefinition',
'processExecution',
'processInstance',
'project',
'projectMember',
'publish',
'pullTask',
'register',
'registrationToken',
'registry',
'registryCredential',
'resourceDefinition',
'restartPolicy',
'restoreFromBackupInput',
'revertToSnapshotInput',
'schema',
'service',
'serviceExposeMap',
'serviceProxy',
'serviceUpgrade',
'setLabelsInput',
'setProjectMembersInput',
'setServiceLinksInput',
'setting',
'snapshot',
'snapshotBackupInput',
'stateTransition',
'statsAccess',
'storagePool',
'task',
'taskInstance',
'typeDocumentation',
'userPreference',
'virtualMachine',
'virtualMachineDisk',
'volume',
'launchConfig',
'serviceEvent',
'serviceConsumeMap',
'dockerBuild',
'secondaryLaunchConfig',
'serviceLink',
'loadBalancerServiceLink',
'addRemoveLoadBalancerServiceLinkInput',
'setLoadBalancerServiceLinksInput',
'serviceUpgradeStrategy',
'toServiceUpgradeStrategy',
'inServiceUpgradeStrategy',
'publicEndpoint',
'haproxyConfig',
'serviceRestart',
'rollingRestartStrategy',
'servicesPortRange',
'healthcheckInstanceHostMap',
'recreateOnQuorumStrategyConfig',
'volumeSnapshotInput',
'nfsConfig',
'blkioDeviceOption',
'scalePolicy'
}
types.update(adds)
types.difference_update(removes)
assert set(_clean_types(admin_user_client.schema.types.keys())) == types
def test_instance_link_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'instanceLink', 'r', {
'accountId': 'r',
'data': 'r',
'instanceId': 'r',
'linkName': 'r',
'ports': 'r',
'targetInstanceId': 'r',
})
auth_check(user_client.schema, 'instanceLink', 'r', {
'accountId': 'r',
'instanceId': 'r',
'linkName': 'r',
'targetInstanceId': 'r',
})
auth_check(project_client.schema, 'instanceLink', 'ru', {
'accountId': 'r',
'instanceId': 'r',
'linkName': 'r',
'targetInstanceId': 'ru',
})
def test_token_auth(token_client):
auth_check(token_client.schema, 'token', 'cr', {
'jwt': 'r',
'code': 'cr',
'user': 'r',
'clientId': 'r',
'security': 'r',
'userType': 'r',
'accountId': 'r',
'hostname': 'r',
'scheme': 'r',
'userIdentity': 'r',
'authProvider': 'r',
'enabled': 'r'
})
def test_github_auth(admin_user_client, user_client, project_client):
assert 'githubconfig' not in user_client.schema.types
assert 'githubconfig' not in project_client.schema.types
auth_check(admin_user_client.schema, 'githubconfig', 'cru', {
'enabled': 'cr',
'allowedIdentities': 'cr',
'clientId': 'cr',
'clientSecret': 'cro',
'accessMode': 'cr',
'hostname': 'cr',
'scheme': 'cr'
})
def test_azure_auth(admin_user_client, user_client, project_client):
assert 'azureadconfig' not in user_client.schema.types
assert 'azureadconfig' not in project_client.schema.types
auth_check(admin_user_client.schema, 'azureadconfig', 'cru', {
'enabled': 'cr',
'clientId': 'cr',
'tenantId': 'cr',
'adminAccountPassword': 'cro',
'adminAccountUsername': 'cr',
'accessMode': 'cr',
'domain': 'cr'
})
def test_ldap_auth(admin_user_client, user_client, project_client):
assert 'ldapconfig' not in user_client.schema.types
assert 'ldapconfig' not in project_client.schema.types
auth_check(admin_user_client.schema, 'ldapconfig', 'cru', {
'accessMode': 'cr',
'domain': 'cr',
'enabled': 'cr',
'groupNameField': 'cr',
'groupObjectClass': 'cr',
'groupSearchField': 'cr',
'loginDomain': 'cr',
'name': 'r',
'port': 'cr',
'server': 'cr',
'serviceAccountPassword': 'cro',
'serviceAccountUsername': 'cr',
'tls': 'cr',
'userDisabledBitMask': 'cr',
'userEnabledAttribute': 'cr',
'userLoginField': 'cr',
'userNameField': 'cr',
'userObjectClass': 'cr',
'userSearchField': 'cr',
'groupMemberMappingAttribute': 'cr',
'userMemberAttribute': 'cr',
'connectionTimeout': 'cr',
'allowedIdentities': 'cr'
})
def test_openldap_auth(admin_user_client, user_client, project_client):
assert 'openldapconfig' not in user_client.schema.types
assert 'openldapconfig' not in project_client.schema.types
auth_check(admin_user_client.schema, 'openldapconfig', 'cru', {
'accessMode': 'cr',
'domain': 'cr',
'enabled': 'cr',
'groupNameField': 'cr',
'groupObjectClass': 'cr',
'groupSearchField': 'cr',
'loginDomain': 'cr',
'name': 'r',
'port': 'cr',
'server': 'cr',
'serviceAccountPassword': 'cro',
'serviceAccountUsername': 'cr',
'tls': 'cr',
'userDisabledBitMask': 'cr',
'userEnabledAttribute': 'cr',
'userLoginField': 'cr',
'userNameField': 'cr',
'userObjectClass': 'cr',
'userSearchField': 'cr',
'groupMemberMappingAttribute': 'cr',
'userMemberAttribute': 'cr',
'connectionTimeout': 'cr'
})
def test_audit_logs(admin_user_client, user_client, project_client):
assert 'auditLog' in user_client.schema.types
assert 'auditLog' in project_client.schema.types
auth_check(user_client.schema, 'auditLog', 'r', {
'accountId': 'r',
'authenticatedAsAccountId': 'r',
'authenticatedAsIdentityId': 'r',
'authType': 'r',
'created': 'r',
'description': 'r',
'eventType': 'r',
'requestObject': 'r',
'resourceId': 'r',
'resourceType': 'r',
'responseCode': 'r',
'responseObject': 'r',
'clientIp': 'r'
})
def test_local_auth(admin_user_client, user_client, project_client):
assert 'localauthconfig' not in user_client.schema.types
assert 'localauthconfig' not in project_client.schema.types
auth_check(admin_user_client.schema, 'localAuthConfig', 'cr', {
'accessMode': 'cr',
'name': 'cr',
'username': 'cr',
'password': 'cro',
'enabled': 'cr',
})
def test_project_auth(admin_user_client, user_client, service_client,
project_client):
auth_check(admin_user_client.schema, 'project', 'crud', {
'allowSystemRole': 'cru',
'description': 'cru',
'kind': 'r',
'name': 'cru',
'uuid': 'cr',
'data': 'r',
'members': 'cr',
'swarm': 'cru',
'kubernetes': 'cru',
'mesos': 'cru',
'virtualMachine': 'cru',
'publicDns': 'cru',
'servicesPortRange': 'cru',
})
auth_check(user_client.schema, 'project', 'crud', {
'description': 'cru',
'kind': 'r',
'name': 'cru',
'uuid': 'r',
'members': 'cr',
'swarm': 'cru',
'kubernetes': 'cru',
'mesos': 'cru',
'virtualMachine': 'cru',
'publicDns': 'cru',
'servicesPortRange': 'cru',
})
auth_check(project_client.schema, 'project', 'r', {
'description': 'r',
'kind': 'r',
'name': 'r',
'uuid': 'r',
'members': 'r',
'swarm': 'r',
'kubernetes': 'r',
'mesos': 'r',
'virtualMachine': 'r',
'publicDns': 'r',
'servicesPortRange': 'r',
})
auth_check(service_client.schema, 'project', 'cr', {
'allowSystemRole': 'cr',
'data': 'r',
'kubernetes': 'cr',
'mesos': 'cr',
'virtualMachine': 'cr',
'members': 'cr',
'publicDns': 'cr',
'servicesPortRange': 'cr',
'swarm': 'cr',
'uuid': 'cr',
})
def test_project_member_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'projectMember', 'cr', {
'name': 'r',
'role': 'cr',
'externalId': 'cr',
'externalIdType': 'cr',
'projectId': 'r',
'data': 'r'
})
auth_check(user_client.schema, 'projectMember', 'cr', {
'name': 'r',
'role': 'cr',
'externalId': 'cr',
'externalIdType': 'cr',
'projectId': 'r',
})
auth_check(project_client.schema, 'projectMember', 'r', {
'name': 'r',
'role': 'r',
'externalId': 'r',
'externalIdType': 'r',
'projectId': 'r',
})
def test_host_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'host', 'r', {
'accountId': 'r',
'agentState': 'r',
'apiProxy': 'r',
'agentId': 'r',
'computeTotal': 'r',
'data': 'r',
'physicalHostId': 'r',
'hostname': 'r',
'info': 'r',
'labels': 'r',
'publicEndpoints': 'r'
})
auth_check(user_client.schema, 'host', 'r', {
'accountId': 'r',
'agentState': 'r',
'computeTotal': 'r',
'physicalHostId': 'r',
'hostname': 'r',
'info': 'r',
'labels': 'r',
'publicEndpoints': 'r'
})
auth_check(project_client.schema, 'host', 'rud', {
'accountId': 'r',
'agentState': 'r',
'computeTotal': 'r',
'physicalHostId': 'r',
'hostname': 'r',
'info': 'r',
'labels': 'ru',
'publicEndpoints': 'r'
})
def test_ip_address_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'ipAddress', 'r', {
'accountId': 'r',
'networkId': 'r',
'address': 'r',
'data': 'r',
})
auth_check(user_client.schema, 'ipAddress', 'r', {
'accountId': 'r',
'address': 'r',
'networkId': 'r',
})
auth_check(project_client.schema, 'ipAddress', 'r', {
'accountId': 'r',
'address': 'r',
'networkId': 'r',
})
def test_task_instance_auth(admin_user_client, user_client, project_client):
assert 'taskInstance' not in user_client.schema.types
assert 'taskInstance' not in project_client.schema.types
auth_check(admin_user_client.schema, 'taskInstance', 'r', {
'endTime': 'r',
'exception': 'r',
'serverId': 'r',
'startTime': 'r',
'taskId': 'r',
})
def test_storagepool_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'storagePool', 'r', {
'accountId': 'r',
'data': 'r',
'externalId': 'r',
'name': 'r',
'driverName': 'r',
'volumeAccessMode': 'r',
'blockDevicePath': 'r',
'volumeCapabilities': 'r',
})
auth_check(user_client.schema, 'storagePool', 'r', {
'accountId': 'r',
'externalId': 'r',
'name': 'r',
'driverName': 'r',
'volumeAccessMode': 'r',
'blockDevicePath': 'r',
'volumeCapabilities': 'r',
})
auth_check(project_client.schema, 'storagePool', 'r', {
'accountId': 'r',
'externalId': 'r',
'name': 'r',
'driverName': 'r',
'volumeAccessMode': 'r',
'blockDevicePath': 'r',
'volumeCapabilities': 'r',
})
def test_volume_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'volume', 'r', {
'accountId': 'r',
'created': 'r',
'data': 'r',
'description': 'r',
'externalId': 'r',
'id': 'r',
'imageId': 'r',
'instanceId': 'r',
'kind': 'r',
'name': 'r',
'removeTime': 'r',
'removed': 'r',
'state': 'r',
'uri': 'r',
'uuid': 'r',
'driver': 'r',
'driverOpts': 'r',
'transitioning': 'r',
'transitioningMessage': 'r',
'transitioningProgress': 'r',
'isHostPath': 'r',
'accessMode': 'r',
})
auth_check(user_client.schema, 'volume', 'r', {
'accountId': 'r',
'created': 'r',
'description': 'r',
'externalId': 'r',
'id': 'r',
'imageId': 'r',
'instanceId': 'r',
'kind': 'r',
'name': 'r',
'removed': 'r',
'state': 'r',
'uri': 'r',
'uuid': 'r',
'driver': 'r',
'driverOpts': 'r',
'transitioning': 'r',
'transitioningMessage': 'r',
'transitioningProgress': 'r',
'isHostPath': 'r',
'accessMode': 'r',
})
auth_check(project_client.schema, 'volume', 'crd', {
'accountId': 'r',
'created': 'r',
'description': 'cr',
'externalId': 'r',
'id': 'r',
'imageId': 'r',
'instanceId': 'r',
'kind': 'r',
'name': 'cr',
'removed': 'r',
'state': 'r',
'uri': 'r',
'uuid': 'r',
'driver': 'cr',
'driverOpts': 'cr',
'transitioning': 'r',
'transitioningMessage': 'r',
'transitioningProgress': 'r',
'isHostPath': 'r',
'accessMode': 'r',
})
def test_container_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'container', 'r', {
'accountId': 'r',
'agentId': 'r',
'allocationState': 'r',
'blkioDeviceOptions': 'r',
'build': 'r',
'capAdd': 'r',
'capDrop': 'r',
'command': 'r',
'count': 'r',
'cpuSet': 'r',
'cpuShares': 'r',
'created': 'r',
'data': 'r',
'dataVolumes': 'r',
'dataVolumeMounts': 'r',
'dataVolumesFrom': 'r',
'description': 'r',
'devices': 'r',
'workingDir': 'r',
'dns': 'r',
'dnsSearch': 'r',
'domainName': 'r',
'entryPoint': 'r',
'environment': 'r',
'firstRunning': 'r',
'hostname': 'r',
'hostId': 'r',
'id': 'r',
'imageUuid': 'r',
'instanceLinks': 'r',
'lxcConf': 'r',
'memory': 'r',
'memorySwap': 'r',
'networkIds': 'r',
'networkMode': 'r',
'networkContainerId': 'r',
'ports': 'r',
'primaryIpAddress': 'r',
'privileged': 'r',
'publishAllPorts': 'r',
'removeTime': 'r',
'registryCredentialId': 'r',
'requestedHostId': 'r',
'restartPolicy': 'r',
'startOnCreate': 'r',
'stdinOpen': 'r',
'token': 'r',
'tty': 'r',
'user': 'r',
'systemContainer': 'r',
'nativeContainer': 'r',
'externalId': 'r',
'labels': 'r',
'healthCheck': 'r',
'healthState': 'r',
'securityOpt': 'r',
'logConfig': 'r',
'pidMode': 'r',
'volumeDriver': 'r',
'extraHosts': 'r',
'readOnly': 'r',
'expose': 'r',
'createIndex': 'r',
'deploymentUnitUuid': 'r',
'version': 'r',
'startCount': 'r'
})
auth_check(user_client.schema, 'container', 'r', {
'accountId': 'r',
'blkioDeviceOptions': 'r',
'build': 'r',
'capAdd': 'r',
'capDrop': 'r',
'command': 'r',
'count': 'r',
'cpuSet': 'r',
'cpuShares': 'r',
'created': 'r',
'dataVolumes': 'r',
'dataVolumeMounts': 'r',
'dataVolumesFrom': 'r',
'description': 'r',
'devices': 'r',
'workingDir': 'r',
'dns': 'r',
'dnsSearch': 'r',
'domainName': 'r',
'entryPoint': 'r',
'environment': 'r',
'firstRunning': 'r',
'hostId': 'r',
'hostname': 'r',
'id': 'r',
'imageUuid': 'r',
'instanceLinks': 'r',
'lxcConf': 'r',
'memory': 'r',
'memorySwap': 'r',
'networkIds': 'r',
'networkMode': 'r',
'networkContainerId': 'r',
'ports': 'r',
'primaryIpAddress': 'r',
'privileged': 'r',
'publishAllPorts': 'r',
'registryCredentialId': 'r',
'requestedHostId': 'r',
'restartPolicy': 'r',
'startOnCreate': 'r',
'stdinOpen': 'r',
'tty': 'r',
'user': 'r',
'systemContainer': 'r',
'nativeContainer': 'r',
'externalId': 'r',
'labels': 'r',
'healthCheck': 'r',
'healthState': 'r',
'securityOpt': 'r',
'logConfig': 'r',
'pidMode': 'r',
'extraHosts': 'r',
'volumeDriver': 'r',
'readOnly': 'r',
'expose': 'r',
'createIndex': 'r',
'deploymentUnitUuid': 'r',
'version': 'r',
'startCount': 'r'
})
auth_check(project_client.schema, 'container', 'crud', {
'accountId': 'r',
'blkioDeviceOptions': 'cr',
'build': 'cr',
'capAdd': 'cr',
'capDrop': 'cr',
'command': 'cr',
'count': 'cr',
'cpuSet': 'cr',
'cpuShares': 'cr',
'created': 'r',
'dataVolumes': 'cr',
'dataVolumesFrom': 'cr',
'dataVolumeMounts': 'cr',
'description': 'cru',
'devices': 'cr',
'workingDir': 'cr',
'dns': 'cr',
'dnsSearch': 'cr',
'domainName': 'cr',
'entryPoint': 'cr',
'environment': 'cr',
'firstRunning': 'r',
'hostId': 'r',
'hostname': 'cr',
'id': 'r',
'imageUuid': 'cr',
'instanceLinks': 'cr',
'lxcConf': 'cr',
'memory': 'cr',
'memorySwap': 'cr',
'networkIds': 'cr',
'networkMode': 'cr',
'networkContainerId': 'cr',
'ports': 'cr',
'primaryIpAddress': 'r',
'privileged': 'cr',
'publishAllPorts': 'cr',
'registryCredentialId': 'cr',
'requestedHostId': 'cr',
'restartPolicy': 'cr',
'startOnCreate': 'cr',
'stdinOpen': 'cr',
'tty': 'cr',
'user': 'cr',
'systemContainer': 'r',
'nativeContainer': 'r',
'externalId': 'r',
'labels': 'cr',
'healthCheck': 'cr',
'healthState': 'r',
'securityOpt': 'cr',
'logConfig': 'cr',
'pidMode': 'cr',
'extraHosts': 'cr',
'volumeDriver': 'cr',
'readOnly': 'cr',
'expose': 'cr',
'createIndex': 'r',
'deploymentUnitUuid': 'r',
'version': 'r',
'startCount': 'r'
})
auth_check(project_client.schema, 'dockerBuild', 'cr', {
'dockerfile': 'cr',
'context': 'cr',
'remote': 'cr',
'nocache': 'cr',
'rm': 'cr',
'forcerm': 'cr',
})
def test_port_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'port', 'r', {
'accountId': 'r',
'data': 'r',
'instanceId': 'r',
'privateIpAddressId': 'r',
'privatePort': 'r',
'protocol': 'r',
'publicIpAddressId': 'r',
'publicPort': 'r',
'bindAddress': 'r',
})
auth_check(user_client.schema, 'port', 'r', {
'accountId': 'r',
'instanceId': 'r',
'privateIpAddressId': 'r',
'privatePort': 'r',
'protocol': 'r',
'publicIpAddressId': 'r',
'publicPort': 'r',
'bindAddress': 'r',
})
auth_check(project_client.schema, 'port', 'ru', {
'accountId': 'r',
'instanceId': 'r',
'privateIpAddressId': 'r',
'privatePort': 'r',
'protocol': 'r',
'publicIpAddressId': 'r',
'publicPort': 'ru',
'bindAddress': 'r',
})
def test_mount_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'mount', 'r', {
'name': 'r',
'description': 'r',
'data': 'r',
'accountId': 'r',
'instanceId': 'r',
'volumeId': 'r',
'kind': 'r',
'uuid': 'r',
'removeTime': 'r',
'id': 'r',
'created': 'r',
'path': 'r',
'permissions': 'r',
'removed': 'r',
'state': 'r',
'transitioning': 'r',
'transitioningMessage': 'r',
'transitioningProgress': 'r'
})
auth_check(user_client.schema, 'mount', 'r', {
'accountId': 'r',
'name': 'r',
'description': 'r',
'instanceId': 'r',
'volumeId': 'r',
'kind': 'r',
'uuid': 'r',
'id': 'r',
'created': 'r',
'path': 'r',
'permissions': 'r',
'removed': 'r',
'state': 'r',
'transitioning': 'r',
'transitioningMessage': 'r',
'transitioningProgress': 'r'
})
auth_check(project_client.schema, 'mount', 'r', {
'accountId': 'r',
'name': 'r',
'description': 'r',
'instanceId': 'r',
'volumeId': 'r',
'kind': 'r',
'uuid': 'r',
'id': 'r',
'created': 'r',
'path': 'r',
'permissions': 'r',
'removed': 'r',
'state': 'r',
'transitioning': 'r',
'transitioningMessage': 'r',
'transitioningProgress': 'r'
})
def test_process_instance_auth(admin_user_client, user_client, project_client):
assert 'processInstance' not in user_client.schema.types
assert 'processInstance' not in project_client.schema.types
auth_check(admin_user_client.schema, 'processInstance', 'r', {
'endTime': 'r',
'exitReason': 'r',
'phase': 'r',
'priority': 'r',
'processName': 'r',
'resourceId': 'r',
'resourceType': 'r',
'result': 'r',
'runningProcessServerId': 'r',
'startProcessServerId': 'r',
'startTime': 'r',
'data': 'r',
})
def test_process_execution(admin_user_client, user_client, project_client):
assert 'processExecution' not in user_client.schema.types
assert 'processExecution' not in project_client.schema.types
auth_check(admin_user_client.schema, 'processExecution', 'r', {
'log': 'r',
'processInstanceId': 'r',
})
def test_process_definition(admin_user_client, user_client, project_client):
assert 'processDefinition' not in user_client.schema.types
assert 'processDefinition' not in project_client.schema.types
auth_check(admin_user_client.schema, 'processDefinition', 'r', {
'extensionBased': 'r',
'preProcessListeners': 'r',
'postProcessListeners': 'r',
'processHandlers': 'r',
'resourceType': 'r',
'stateTransitions': 'r',
})
def test_config_item(admin_user_client, user_client, project_client):
assert 'configItem' not in user_client.schema.types
assert 'configItem' not in project_client.schema.types
auth_check(admin_user_client.schema, 'configItem', 'r', {
'sourceVersion': 'r',
})
def test_config_item_status_auth(admin_user_client, user_client,
project_client):
assert 'configItemStatus' not in user_client.schema.types
assert 'configItemStatus' not in project_client.schema.types
auth_check(admin_user_client.schema, 'configItemStatus', 'ru', {
'agentId': 'r',
'accountId': 'r',
'appliedUpdated': 'r',
'appliedVersion': 'ru',
'requestedUpdated': 'r',
'requestedVersion': 'r',
'sourceVersion': 'r',
})
def test_setting_auth(admin_user_client, user_client, project_client):
auth_check(user_client.schema, 'activeSetting', 'r', {
'name': 'r',
'activeValue': 'r',
'value': 'r',
})
auth_check(project_client.schema, 'activeSetting', 'r', {
'name': 'r',
'activeValue': 'r',
'value': 'r',
})
auth_check(admin_user_client.schema, 'activeSetting', 'rud', {
'name': 'r',
'activeValue': 'r',
'value': 'ru',
'source': 'r',
'inDb': 'r',
})
auth_check(user_client.schema, 'setting', 'r', {
'name': 'r',
'activeValue': 'r',
'value': 'r',
})
auth_check(project_client.schema, 'setting', 'r', {
'name': 'r',
'activeValue': 'r',
'value': 'r',
})
auth_check(admin_user_client.schema, 'setting', 'crud', {
'name': 'cr',
'activeValue': 'r',
'value': 'cru',
'source': 'r',
'inDb': 'r',
})
def test_schema_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'schema', 'r', {
'collectionActions': 'r',
'collectionFields': 'r',
'collectionFilters': 'r',
'collectionMethods': 'r',
'includeableLinks': 'r',
'pluralName': 'r',
'resourceActions': 'r',
'resourceFields': 'r',
'resourceMethods': 'r',
})
auth_check(user_client.schema, 'schema', 'r', {
'collectionActions': 'r',
'collectionFields': 'r',
'collectionFilters': 'r',
'collectionMethods': 'r',
'includeableLinks': 'r',
'pluralName': 'r',
'resourceActions': 'r',
'resourceFields': 'r',
'resourceMethods': 'r',
})
auth_check(project_client.schema, 'schema', 'r', {
'collectionActions': 'r',
'collectionFields': 'r',
'collectionFilters': 'r',
'collectionMethods': 'r',
'includeableLinks': 'r',
'pluralName': 'r',
'resourceActions': 'r',
'resourceFields': 'r',
'resourceMethods': 'r',
})
def test_account_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'account', 'crud', {
'id': 'r',
'externalId': 'cru',
'externalIdType': 'cru',
'identity': 'r',
'removeTime': 'r',
'data': 'r',
'kind': 'cru',
'uuid': 'cr'
})
auth_check(user_client.schema, 'account', 'r', {
})
auth_check(project_client.schema, 'account', 'r', {
})
def test_agent_auth(admin_user_client, user_client, project_client):
assert 'agent' not in user_client.schema.types
assert 'agent' not in project_client.schema.types
auth_check(admin_user_client.schema, 'agent', 'r', {
'managedConfig': 'r',
'uri': 'r',
'accountId': 'r',
'data': 'r',
})
def test_extension_point_auth(admin_user_client, user_client, project_client):
assert 'extensionPoint' not in user_client.schema.types
assert 'extensionPoint' not in project_client.schema.types
auth_check(admin_user_client.schema, 'extensionPoint', 'r', {
'excludeSetting': 'r',
'includeSetting': 'r',
'listSetting': 'r',
'implementations': 'r',
})
def test_api_key_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'apiKey', 'crud', {
'publicValue': 'cr',
'secretValue': 'cro',
'removeTime': 'r',
'data': 'r',
'accountId': 'cr',
})
auth_check(user_client.schema, 'apiKey', 'crud', {
'publicValue': 'r',
'accountId': 'r',
'secretValue': 'ro',
})
auth_check(project_client.schema, 'apiKey', 'crud', {
'publicValue': 'r',
'accountId': 'r',
'secretValue': 'ro',
})
def test_subscribe_auth(admin_user_client, user_client, project_client):
assert 'subscribe' not in admin_user_client.schema.types
assert 'subscribe' not in user_client.schema.types
auth_check(project_client.schema, 'subscribe', 'cr', {
'eventNames': 'cr',
})
def test_registration_tokens_auth(admin_user_client, user_client,
project_client, service_client):
auth_check(admin_user_client.schema, 'registrationToken', 'r', {
'created': 'r',
'data': 'r',
'description': 'r',
'removeTime': 'r',
'accountId': 'r',
'image': 'r',
'command': 'r',
'registrationUrl': 'r',
'token': 'r',
})
auth_check(service_client.schema, 'registrationToken', 'cr', {
'created': 'r',
'data': 'r',
'description': 'cr',
'removeTime': 'r',
'accountId': 'cr',
'image': 'r',
'command': 'r',
'registrationUrl': 'r',
'token': 'r',
})
auth_check(user_client.schema, 'registrationToken', 'r', {
'accountId': 'r',
'created': 'r',
'description': 'r',
'uuid': 'r',
'image': 'r',
'command': 'r',
'registrationUrl': 'r',
'token': 'r',
})
auth_check(project_client.schema, 'registrationToken', 'cr', {
'accountId': 'r',
'created': 'r',
'description': 'cr',
'uuid': 'r',
'image': 'r',
'command': 'r',
'registrationUrl': 'r',
'token': 'r',
})
def test_type_documentation_auth(admin_user_client, user_client,
project_client):
auth_check(admin_user_client.schema, 'typeDocumentation', 'r', {
'resourceFields': 'r',
})
auth_check(user_client.schema, 'typeDocumentation', 'r', {
'resourceFields': 'r',
})
auth_check(project_client.schema, 'typeDocumentation', 'r', {
'resourceFields': 'r',
})
def test_stats_access_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'statsAccess', 'r', {
'token': 'r',
'url': 'r',
})
auth_check(user_client.schema, 'statsAccess', 'r', {
'token': 'r',
'url': 'r',
})
auth_check(project_client.schema, 'statsAccess', 'r', {
'token': 'r',
'url': 'r',
})
def test_account_resource_auth(admin_user_client):
resource_action_check(admin_user_client.schema, 'account', [
'update',
'activate',
'deactivate',
'restore',
'remove',
'purge',
'create',
])
def test_physical_host(admin_user_client, user_client, service_client,
project_client):
auth_check(admin_user_client.schema, 'physicalHost', 'r', {
'accountId': 'r',
'data': 'r',
'driver': 'r',
'externalId': 'r',
})
auth_check(user_client.schema, 'physicalHost', 'r', {
'accountId': 'r',
'driver': 'r',
'externalId': 'r',
})
auth_check(project_client.schema, 'physicalHost', 'r', {
'accountId': 'r',
'driver': 'r',
'externalId': 'r',
})
def test_registry_credentials(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'registryCredential', 'r', {
'accountId': 'r',
'data': 'r',
'email': 'r',
'publicValue': 'r',
'secretValue': 'ro',
'registryId': 'r',
})
auth_check(user_client.schema, 'registryCredential', 'r', {
'accountId': 'r',
'email': 'r',
'publicValue': 'r',
'secretValue': 'ro',
'registryId': 'r',
})
auth_check(project_client.schema, 'registryCredential', 'crud', {
'accountId': 'r',
'email': 'cru',
'publicValue': 'cru',
'secretValue': 'curo',
'registryId': 'cr',
})
def test_registry(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'registry', 'r', {
'accountId': 'r',
'data': 'r',
'driverName': 'r',
'externalId': 'r',
'serverAddress': 'r',
'volumeAccessMode': 'r',
'blockDevicePath': 'r',
'volumeCapabilities': 'r',
})
auth_check(user_client.schema, 'registry', 'r', {
'accountId': 'r',
'externalId': 'r',
'driverName': 'r',
'serverAddress': 'r',
'volumeAccessMode': 'r',
'blockDevicePath': 'r',
'volumeCapabilities': 'r',
})
auth_check(project_client.schema, 'registry', 'crud', {
'accountId': 'r',
'driverName': 'r',
'externalId': 'r',
'serverAddress': 'cr',
'volumeAccessMode': 'r',
'blockDevicePath': 'r',
'volumeCapabilities': 'r',
})
def test_container_events(admin_user_client, user_client, agent_client,
project_client):
auth_check(admin_user_client.schema, 'containerEvent', 'r', {
'externalTimestamp': 'r',
'hostId': 'r',
'accountId': 'r',
'externalFrom': 'r',
'reportedHostUuid': 'r',
'externalId': 'r',
'externalStatus': 'r',
'data': 'r',
'dockerInspect': 'r'
})
auth_check(agent_client.schema, 'containerEvent', 'cr', {
'externalTimestamp': 'cr',
'externalFrom': 'cr',
'reportedHostUuid': 'cr',
'externalId': 'cr',
'externalStatus': 'cr',
'dockerInspect': 'cr',
'data': 'cr',
'id': 'r'
})
auth_check(user_client.schema, 'containerEvent', 'r', {
'externalTimestamp': 'r',
'hostId': 'r',
'externalFrom': 'r',
'reportedHostUuid': 'r',
'externalId': 'r',
'externalStatus': 'r',
'accountId': 'r',
'dockerInspect': 'r'
})
auth_check(project_client.schema, 'containerEvent', 'r', {
'externalTimestamp': 'r',
'hostId': 'r',
'externalFrom': 'r',
'reportedHostUuid': 'r',
'externalId': 'r',
'externalStatus': 'r',
'accountId': 'r',
'dockerInspect': 'r'
})
def test_service_events(admin_user_client, user_client, agent_client,
project_client):
auth_check(admin_user_client.schema, 'serviceEvent', 'r', {
'externalTimestamp': 'r',
'hostId': 'r',
'instanceId': 'r',
'accountId': 'r',
'healthcheckUuid': 'r',
'reportedHealth': 'r',
'data': 'r',
})
auth_check(user_client.schema, 'serviceEvent', 'r', {
'externalTimestamp': 'r',
'hostId': 'r',
'instanceId': 'r',
'accountId': 'r',
'healthcheckUuid': 'r',
'reportedHealth': 'r',
})
auth_check(project_client.schema, 'serviceEvent', 'r', {
'externalTimestamp': 'r',
'hostId': 'r',
'instanceId': 'r',
'accountId': 'r',
'healthcheckUuid': 'r',
'reportedHealth': 'r',
})
auth_check(agent_client.schema, 'serviceEvent', 'cr', {
'externalTimestamp': 'cr',
'healthcheckUuid': 'cr',
'reportedHealth': 'cr',
})
def test_svc_discovery_service(admin_user_client, user_client, project_client,
project_admin_client):
auth_check(admin_user_client.schema, 'service', 'r', {
'name': 'r',
'externalId': 'r',
'environmentId': 'r',
'scale': 'r',
'launchConfig': 'r',
'accountId': 'r',
'data': 'r',
'upgrade': 'r',
'secondaryLaunchConfigs': 'r',
'vip': 'r',
'createIndex': 'r',
'metadata': 'r',
'selectorLink': 'r',
'selectorContainer': 'r',
'fqdn': 'r',
'publicEndpoints': 'r',
'retainIp': 'r',
'assignServiceIpAddress': 'r',
'healthState': 'r',
'startOnCreate': 'r',
'scalePolicy': 'r',
'currentScale': 'r',
})
auth_check(user_client.schema, 'service', 'r', {
'name': 'r',
'externalId': 'r',
'environmentId': 'r',
'scale': 'r',
'launchConfig': 'r',
'accountId': 'r',
'upgrade': 'r',
'secondaryLaunchConfigs': 'r',
'vip': 'r',
'createIndex': 'r',
'metadata': 'r',
'selectorLink': 'r',
'selectorContainer': 'r',
'fqdn': 'r',
'publicEndpoints': 'r',
'retainIp': 'r',
'assignServiceIpAddress': 'r',
'healthState': 'r',
'startOnCreate': 'r',
'scalePolicy': 'r',
'currentScale': 'r',
})
auth_check(project_client.schema, 'service', 'crud', {
'name': 'cru',
'externalId': 'cr',
'environmentId': 'cr',
'scale': 'cru',
'launchConfig': 'cr',
'accountId': 'r',
'upgrade': 'r',
'secondaryLaunchConfigs': 'cr',
'vip': 'cr',
'createIndex': 'r',
'metadata': 'cru',
'selectorLink': 'cru',
'selectorContainer': 'cru',
'fqdn': 'r',
'publicEndpoints': 'r',
'retainIp': 'cru',
'assignServiceIpAddress': 'cr',
'healthState': 'r',
'startOnCreate': 'cr',
'scalePolicy': 'cru',
'currentScale': 'r',
})
resource_action_check(user_client.schema, 'service', [
'activate',
'addservicelink',
'cancelupgrade',
'create',
'deactivate',
'finishupgrade',
'remove',
'removeservicelink',
'restart',
'rollback',
'setservicelinks',
'update',
'upgrade',
'cancelrollback',
])
resource_action_check(admin_user_client.schema, 'service', [
'activate',
'addservicelink',
'cancelupgrade',
'create',
'deactivate',
'finishupgrade',
'remove',
'removeservicelink',
'restart',
'rollback',
'setservicelinks',
'update',
'upgrade',
'cancelrollback',
])
resource_action_check(project_client.schema, 'service', [
'activate',
'addservicelink',
'cancelupgrade',
'create',
'deactivate',
'finishupgrade',
'remove',
'removeservicelink',
'restart',
'rollback',
'setservicelinks',
'update',
'upgrade',
'cancelrollback',
])
resource_action_check(project_admin_client.schema, 'service', [
'activate',
'addservicelink',
'cancelupgrade',
'certificate',
'create',
'deactivate',
'finishupgrade',
'remove',
'removeservicelink',
'restart',
'rollback',
'setservicelinks',
'update',
'upgrade',
'cancelrollback',
])
def test_auth_compose_project(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'composeProject', 'r', {
'name': 'r',
'accountId': 'r',
'data': 'r',
'templates': 'r',
'environment': 'r',
'externalId': 'r',
'previousExternalId': 'r',
'previousEnvironment': 'r',
'healthState': 'r',
})
auth_check(user_client.schema, 'composeProject', 'r', {
'name': 'r',
'accountId': 'r',
'templates': 'r',
'environment': 'r',
'externalId': 'r',
'previousExternalId': 'r',
'previousEnvironment': 'r',
'healthState': 'r',
})
auth_check(project_client.schema, 'composeProject', 'crud', {
'name': 'cr',
'accountId': 'r',
'templates': 'cr',
'environment': 'cr',
'externalId': 'cru',
'previousExternalId': 'cru',
'previousEnvironment': 'cru',
'healthState': 'r',
})
def test_auth_kubernetes_stack(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'kubernetesStack', 'r', {
'name': 'r',
'accountId': 'r',
'data': 'r',
'namespace': 'r',
'templates': 'r',
'environment': 'r',
'externalId': 'r',
'previousExternalId': 'r',
'previousEnvironment': 'r',
'healthState': 'r',
})
auth_check(user_client.schema, 'kubernetesStack', 'r', {
'name': 'r',
'accountId': 'r',
'namespace': 'r',
'templates': 'r',
'environment': 'r',
'externalId': 'r',
'previousExternalId': 'r',
'previousEnvironment': 'r',
'healthState': 'r',
})
auth_check(project_client.schema, 'kubernetesStack', 'crud', {
'name': 'cr',
'accountId': 'r',
'namespace': 'cr',
'templates': 'cr',
'environment': 'cr',
'externalId': 'cru',
'previousExternalId': 'cru',
'previousEnvironment': 'cru',
'healthState': 'r',
})
def test_svc_discovery_environment(admin_user_client, user_client,
project_client):
auth_check(admin_user_client.schema, 'environment', 'r', {
'name': 'r',
'accountId': 'r',
'data': 'r',
'dockerCompose': 'r',
'rancherCompose': 'r',
'environment': 'r',
'externalId': 'r',
'previousExternalId': 'r',
'previousEnvironment': 'r',
'outputs': 'r',
'startOnCreate': 'r',
'healthState': 'r',
})
auth_check(user_client.schema, 'environment', 'r', {
'name': 'r',
'accountId': 'r',
'dockerCompose': 'r',
'rancherCompose': 'r',
'environment': 'r',
'externalId': 'r',
'previousExternalId': 'r',
'previousEnvironment': 'r',
'outputs': 'r',
'startOnCreate': 'r',
'healthState': 'r',
})
auth_check(project_client.schema, 'environment', 'crud', {
'name': 'cru',
'accountId': 'r',
'dockerCompose': 'cr',
'rancherCompose': 'cr',
'environment': 'cr',
'externalId': 'cru',
'previousExternalId': 'cru',
'previousEnvironment': 'cru',
'outputs': 'cru',
'startOnCreate': 'cr',
'healthState': 'r',
})
def test_svc_discovery_lb_service(admin_user_client, user_client,
project_client):
auth_check(admin_user_client.schema, 'loadBalancerService', 'r', {
'name': 'r',
'externalId': 'r',
'environmentId': 'r',
'scale': 'r',
'launchConfig': 'r',
'accountId': 'r',
'data': 'r',
'upgrade': 'r',
'loadBalancerConfig': 'r',
'vip': 'r',
'defaultCertificateId': 'r',
'certificateIds': 'r',
'metadata': 'r',
'selectorLink': 'r',
'fqdn': 'r',
'publicEndpoints': 'r',
'retainIp': 'r',
'assignServiceIpAddress': 'r',
'healthState': 'r',
'startOnCreate': 'r',
'scalePolicy': 'r',
'currentScale': 'r',
})
auth_check(user_client.schema, 'loadBalancerService', 'r', {
'name': 'r',
'externalId': 'r',
'environmentId': 'r',
'scale': 'r',
'launchConfig': 'r',
'accountId': 'r',
'upgrade': 'r',
'loadBalancerConfig': 'r',
'vip': 'r',
'defaultCertificateId': 'r',
'certificateIds': 'r',
'metadata': 'r',
'selectorLink': 'r',
'fqdn': 'r',
'publicEndpoints': 'r',
'retainIp': 'r',
'assignServiceIpAddress': 'r',
'healthState': 'r',
'startOnCreate': 'r',
'scalePolicy': 'r',
'currentScale': 'r',
})
auth_check(project_client.schema, 'loadBalancerService', 'crud', {
'name': 'cru',
'externalId': 'cr',
'environmentId': 'cr',
'scale': 'cru',
'launchConfig': 'cr',
'accountId': 'r',
'upgrade': 'r',
'loadBalancerConfig': 'cru',
'vip': 'cr',
'defaultCertificateId': 'cru',
'certificateIds': 'cru',
'metadata': 'cru',
'selectorLink': 'cru',
'fqdn': 'r',
'publicEndpoints': 'r',
'retainIp': 'cru',
'assignServiceIpAddress': 'cr',
'healthState': 'r',
'startOnCreate': 'cr',
'scalePolicy': 'cru',
'currentScale': 'r',
})
def test_svc_discovery_consume_map(admin_user_client, user_client,
project_client):
auth_check(admin_user_client.schema, 'serviceConsumeMap', 'r', {
'name': 'r',
'serviceId': 'r',
'consumedServiceId': 'r',
'ports': 'r',
'data': 'r',
'accountId': 'r'
})
auth_check(user_client.schema, 'serviceConsumeMap', 'r', {
'name': 'r',
'serviceId': 'r',
'consumedServiceId': 'r',
'ports': 'r',
'accountId': 'r'
})
auth_check(project_client.schema, 'serviceConsumeMap', 'r', {
'name': 'r',
'serviceId': 'r',
'consumedServiceId': 'r',
'ports': 'r',
'accountId': 'r',
})
def test_auth_k8s_stack_upgrade(admin_user_client, user_client,
project_client):
auth_check(admin_user_client.schema, 'kubernetesStackUpgrade', 'r', {
'templates': 'r',
'environment': 'r',
'externalId': 'r',
})
auth_check(user_client.schema, 'kubernetesStackUpgrade', 'r', {
'templates': 'r',
'environment': 'r',
'externalId': 'r',
})
auth_check(project_client.schema, 'kubernetesStackUpgrade', 'cr', {
'templates': 'cr',
'environment': 'cr',
'externalId': 'cr',
})
def test_auth_env_upgrade(admin_user_client, user_client,
project_client):
auth_check(admin_user_client.schema, 'environmentUpgrade', 'r', {
'dockerCompose': 'r',
'rancherCompose': 'r',
'environment': 'r',
'externalId': 'r',
})
auth_check(user_client.schema, 'environmentUpgrade', 'r', {
'dockerCompose': 'r',
'rancherCompose': 'r',
'environment': 'r',
'externalId': 'r',
})
auth_check(project_client.schema, 'environmentUpgrade', 'cr', {
'dockerCompose': 'cr',
'rancherCompose': 'cr',
'environment': 'cr',
'externalId': 'cr',
})
def test_auth_service_upgrade(admin_user_client, user_client,
project_client):
auth_check(admin_user_client.schema, 'serviceUpgrade', 'r', {
'inServiceStrategy': 'r',
'toServiceStrategy': 'r'
})
auth_check(user_client.schema, 'serviceUpgrade', 'r', {
'inServiceStrategy': 'r',
'toServiceStrategy': 'r'
})
auth_check(project_client.schema, 'serviceUpgrade', 'cr', {
'inServiceStrategy': 'cr',
'toServiceStrategy': 'cr'
})
def test_auth_in_service_upgrade_strategy(admin_user_client, user_client,
project_client):
auth_check(admin_user_client.schema, 'inServiceUpgradeStrategy', 'r', {
'batchSize': 'r',
'intervalMillis': 'r',
'launchConfig': 'r',
'secondaryLaunchConfigs': 'r',
'previousLaunchConfig': 'r',
'previousSecondaryLaunchConfigs': 'r',
'startFirst': 'r',
})
auth_check(user_client.schema, 'inServiceUpgradeStrategy', 'r', {
'batchSize': 'r',
'intervalMillis': 'r',
'launchConfig': 'r',
'secondaryLaunchConfigs': 'r',
'previousLaunchConfig': 'r',
'previousSecondaryLaunchConfigs': 'r',
'startFirst': 'r',
})
auth_check(project_client.schema, 'inServiceUpgradeStrategy', 'cr', {
'batchSize': 'cr',
'intervalMillis': 'cr',
'launchConfig': 'cr',
'secondaryLaunchConfigs': 'cr',
'previousLaunchConfig': 'r',
'previousSecondaryLaunchConfigs': 'r',
'startFirst': 'cr',
})
def test_auth_to_service_upgrade_strategy(admin_user_client, user_client,
project_client):
auth_check(admin_user_client.schema, 'toServiceUpgradeStrategy', 'r', {
'updateLinks': 'r',
'toServiceId': 'r',
'batchSize': 'r',
'intervalMillis': 'r',
'finalScale': 'r'
})
auth_check(user_client.schema, 'toServiceUpgradeStrategy', 'r', {
'updateLinks': 'r',
'toServiceId': 'r',
'batchSize': 'r',
'intervalMillis': 'r',
'finalScale': 'r'
})
auth_check(project_client.schema, 'toServiceUpgradeStrategy', 'cr', {
'updateLinks': 'cr',
'toServiceId': 'cr',
'batchSize': 'cr',
'intervalMillis': 'cr',
'finalScale': 'cr'
})
def test_svc_discovery_expose_map(admin_user_client, user_client,
project_client):
auth_check(admin_user_client.schema, 'serviceExposeMap', 'r', {
'name': 'r',
'serviceId': 'r',
'instanceId': 'r',
'ipAddress': 'r',
'data': 'r',
'accountId': 'r',
'managed': 'r'
})
auth_check(user_client.schema, 'serviceExposeMap', 'r', {
'name': 'r',
'serviceId': 'r',
'instanceId': 'r',
'ipAddress': 'r',
'accountId': 'r',
'managed': 'r'
})
auth_check(project_client.schema, 'serviceExposeMap', 'r', {
'name': 'r',
'serviceId': 'r',
'instanceId': 'r',
'ipAddress': 'r',
'accountId': 'r',
'managed': 'r'
})
def test_svc_discovery_external_service(admin_user_client, user_client,
project_client):
auth_check(admin_user_client.schema, 'externalService', 'r', {
'name': 'r',
'externalId': 'r',
'environmentId': 'r',
'hostname': 'r',
'externalIpAddresses': 'r',
'accountId': 'r',
'data': 'r',
'upgrade': 'r',
'healthCheck': 'r',
'metadata': 'r',
'launchConfig': 'r',
'fqdn': 'r',
'healthState': 'r',
'startOnCreate': 'r',
})
auth_check(user_client.schema, 'externalService', 'r', {
'name': 'r',
'externalId': 'r',
'environmentId': 'r',
'hostname': 'r',
'externalIpAddresses': 'r',
'accountId': 'r',
'upgrade': 'r',
'healthCheck': 'r',
'metadata': 'r',
'launchConfig': 'r',
'fqdn': 'r',
'healthState': 'r',
'startOnCreate': 'r',
})
auth_check(project_client.schema, 'externalService', 'crud', {
'name': 'cru',
'externalId': 'cr',
'environmentId': 'cr',
'hostname': 'cru',
'externalIpAddresses': 'cru',
'accountId': 'r',
'upgrade': 'r',
'healthCheck': 'cr',
'metadata': 'cru',
'launchConfig': 'cr',
'fqdn': 'r',
'healthState': 'r',
'startOnCreate': 'cr',
})
def test_pull_task(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'pullTask', 'r', {
'accountId': 'r',
'data': 'r',
'labels': 'r',
'mode': 'r',
'image': 'r',
'status': 'r',
})
auth_check(user_client.schema, 'pullTask', 'r', {
'accountId': 'r',
'labels': 'r',
'mode': 'r',
'image': 'r',
'status': 'r',
})
auth_check(project_client.schema, 'pullTask', 'cr', {
'accountId': 'r',
'labels': 'cr',
'mode': 'cr',
'image': 'cr',
'status': 'r',
})
def test_external_event(agent_client, admin_user_client, user_client,
project_client):
auth_check(admin_user_client.schema, 'externalEvent', 'r', {
'accountId': 'r',
'data': 'r',
'externalId': 'r',
'eventType': 'r',
'reportedAccountId': 'r',
})
auth_check(user_client.schema, 'externalEvent', 'r', {
'accountId': 'r',
'externalId': 'r',
'eventType': 'r',
'reportedAccountId': 'r',
})
auth_check(project_client.schema, 'externalEvent', 'r', {
'accountId': 'r',
'externalId': 'r',
'eventType': 'r',
'reportedAccountId': 'r',
})
auth_check(agent_client.schema, 'externalEvent', 'r', {
'externalId': 'r',
'eventType': 'r',
})
def test_external_sp_event(agent_client, admin_user_client, user_client,
project_client):
type = 'externalStoragePoolEvent'
auth_check(admin_user_client.schema, type, 'r', {
'accountId': 'r',
'data': 'r',
'externalId': 'r',
'eventType': 'r',
'hostUuids': 'r',
'reportedAccountId': 'r',
'storagePool': 'r',
})
auth_check(user_client.schema, type, 'r', {
'accountId': 'r',
'externalId': 'r',
'eventType': 'r',
'hostUuids': 'r',
'reportedAccountId': 'r',
'storagePool': 'r',
})
auth_check(project_client.schema, type, 'r', {
'accountId': 'r',
'externalId': 'r',
'eventType': 'r',
'hostUuids': 'r',
'reportedAccountId': 'r',
'storagePool': 'r',
})
auth_check(agent_client.schema, type, 'cr', {
'externalId': 'cr',
'eventType': 'cr',
'hostUuids': 'cr',
'storagePool': 'cr',
})
def test_external_volume_event(agent_client, admin_user_client, user_client,
project_client):
type = 'externalVolumeEvent'
auth_check(admin_user_client.schema, type, 'r', {
'accountId': 'r',
'data': 'r',
'externalId': 'r',
'eventType': 'r',
'reportedAccountId': 'r',
'volume': 'r',
})
auth_check(user_client.schema, type, 'r', {
'accountId': 'r',
'externalId': 'r',
'eventType': 'r',
'reportedAccountId': 'r',
'volume': 'r',
})
auth_check(project_client.schema, type, 'r', {
'accountId': 'r',
'externalId': 'r',
'eventType': 'r',
'reportedAccountId': 'r',
'volume': 'r',
})
auth_check(agent_client.schema, type, 'cr', {
'externalId': 'cr',
'eventType': 'cr',
'volume': 'cr',
})
def test_external_dns_event(agent_client, admin_user_client, user_client,
project_client):
type = 'externalDnsEvent'
auth_check(admin_user_client.schema, type, 'r', {
'accountId': 'r',
'data': 'r',
'externalId': 'r',
'eventType': 'r',
'reportedAccountId': 'r',
'stackName': 'r',
'serviceName': 'r',
'fqdn': 'r'
})
auth_check(user_client.schema, type, 'r', {
'accountId': 'r',
'externalId': 'r',
'eventType': 'r',
'reportedAccountId': 'r',
'stackName': 'r',
'serviceName': 'r',
'fqdn': 'r'
})
auth_check(project_client.schema, type, 'r', {
'accountId': 'r',
'externalId': 'r',
'eventType': 'r',
'reportedAccountId': 'r',
'stackName': 'r',
'serviceName': 'r',
'fqdn': 'r'
})
auth_check(agent_client.schema, type, 'cr', {
'externalId': 'cr',
'eventType': 'cr',
'stackName': 'cr',
'serviceName': 'cr',
'fqdn': 'cr'
})
def test_external_service_event(agent_client, admin_user_client, user_client,
project_client):
type = 'externalServiceEvent'
auth_check(admin_user_client.schema, type, 'r', {
'accountId': 'r',
'data': 'r',
'externalId': 'r',
'eventType': 'r',
'reportedAccountId': 'r',
'environment': 'r',
'service': 'r',
})
auth_check(user_client.schema, type, 'r', {
'accountId': 'r',
'externalId': 'r',
'eventType': 'r',
'reportedAccountId': 'r',
'environment': 'r',
'service': 'r',
})
auth_check(project_client.schema, type, 'r', {
'accountId': 'r',
'externalId': 'r',
'eventType': 'r',
'reportedAccountId': 'r',
'environment': 'r',
'service': 'r',
})
auth_check(agent_client.schema, type, 'cr', {
'externalId': 'cr',
'eventType': 'cr',
'environment': 'cr',
'service': 'cr',
})
def test_external_host_event(agent_client, admin_user_client, user_client,
project_client):
type = 'externalHostEvent'
auth_check(admin_user_client.schema, type, 'r', {
'accountId': 'r',
'data': 'r',
'externalId': 'r',
'eventType': 'r',
'reportedAccountId': 'r',
'hostLabel': 'r',
'deleteHost': 'r',
'hostId': 'r',
})
auth_check(user_client.schema, type, 'r', {
'accountId': 'r',
'externalId': 'r',
'eventType': 'r',
'reportedAccountId': 'r',
'hostLabel': 'r',
'deleteHost': 'r',
'hostId': 'r',
})
auth_check(project_client.schema, type, 'cr', {
'accountId': 'r',
'externalId': 'r',
'eventType': 'cr',
'reportedAccountId': 'r',
'hostLabel': 'cr',
'deleteHost': 'cr',
'hostId': 'cr',
})
def test_virtual_machine(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'virtualMachine', 'r', {
'accountId': 'r',
'agentId': 'r',
'allocationState': 'r',
'blkioDeviceOptions': 'r',
'command': 'r',
'count': 'r',
'cpuSet': 'r',
'cpuShares': 'r',
'created': 'r',
'data': 'r',
'description': 'r',
'dns': 'r',
'dnsSearch': 'r',
'domainName': 'r',
'firstRunning': 'r',
'hostId': 'r',
'hostname': 'r',
'id': 'r',
'imageUuid': 'r',
'instanceLinks': 'r',
'memory': 'r',
'memorySwap': 'r',
'networkIds': 'r',
'networkMode': 'r',
'ports': 'r',
'primaryIpAddress': 'r',
'removeTime': 'r',
'registryCredentialId': 'r',
'requestedHostId': 'r',
'restartPolicy': 'r',
'startOnCreate': 'r',
'token': 'r',
'systemContainer': 'r',
'nativeContainer': 'r',
'externalId': 'r',
'labels': 'r',
'healthCheck': 'r',
'healthState': 'r',
'securityOpt': 'r',
'logConfig': 'r',
'volumeDriver': 'r',
'extraHosts': 'r',
'expose': 'r',
'createIndex': 'r',
'deploymentUnitUuid': 'r',
'version': 'r',
'startCount': 'r',
'vcpu': 'r',
'userdata': 'r',
'memoryMb': 'r',
'disks': 'r',
})
auth_check(user_client.schema, 'virtualMachine', 'r', {
'accountId': 'r',
'blkioDeviceOptions': 'r',
'command': 'r',
'count': 'r',
'cpuSet': 'r',
'cpuShares': 'r',
'created': 'r',
'description': 'r',
'dns': 'r',
'dnsSearch': 'r',
'domainName': 'r',
'firstRunning': 'r',
'hostId': 'r',
'hostname': 'r',
'id': 'r',
'imageUuid': 'r',
'instanceLinks': 'r',
'memory': 'r',
'memorySwap': 'r',
'networkIds': 'r',
'networkMode': 'r',
'ports': 'r',
'primaryIpAddress': 'r',
'registryCredentialId': 'r',
'requestedHostId': 'r',
'restartPolicy': 'r',
'startOnCreate': 'r',
'systemContainer': 'r',
'nativeContainer': 'r',
'externalId': 'r',
'labels': 'r',
'healthCheck': 'r',
'healthState': 'r',
'securityOpt': 'r',
'logConfig': 'r',
'extraHosts': 'r',
'volumeDriver': 'r',
'expose': 'r',
'createIndex': 'r',
'deploymentUnitUuid': 'r',
'version': 'r',
'startCount': 'r',
'vcpu': 'r',
'userdata': 'r',
'memoryMb': 'r',
'disks': 'r',
})
auth_check(project_client.schema, 'virtualMachine', 'crud', {
'accountId': 'r',
'blkioDeviceOptions': 'cr',
'command': 'cr',
'count': 'cr',
'cpuSet': 'cr',
'cpuShares': 'cr',
'created': 'r',
'description': 'cru',
'dns': 'cr',
'dnsSearch': 'cr',
'domainName': 'cr',
'firstRunning': 'r',
'hostId': 'r',
'hostname': 'cr',
'id': 'r',
'imageUuid': 'cr',
'instanceLinks': 'cr',
'memory': 'cr',
'memorySwap': 'cr',
'networkIds': 'cr',
'networkMode': 'cr',
'ports': 'cr',
'primaryIpAddress': 'r',
'registryCredentialId': 'cr',
'requestedHostId': 'cr',
'restartPolicy': 'cr',
'startOnCreate': 'cr',
'systemContainer': 'r',
'nativeContainer': 'r',
'externalId': 'r',
'labels': 'cr',
'healthCheck': 'cr',
'healthState': 'r',
'securityOpt': 'cr',
'logConfig': 'cr',
'extraHosts': 'cr',
'volumeDriver': 'cr',
'expose': 'cr',
'createIndex': 'r',
'deploymentUnitUuid': 'r',
'version': 'r',
'startCount': 'r',
'vcpu': 'cr',
'userdata': 'cr',
'memoryMb': 'cr',
'disks': 'cr',
})
def test_virtual_machine_disk(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'virtualMachineDisk', 'r', {
'name': 'r',
'size': 'r',
'readIops': 'r',
'writeIops': 'r',
'opts': 'r',
'driver': 'r',
'root': 'r',
})
auth_check(user_client.schema, 'virtualMachineDisk', 'r', {
'name': 'r',
'size': 'r',
'readIops': 'r',
'writeIops': 'r',
'opts': 'r',
'driver': 'r',
'root': 'r',
})
auth_check(project_client.schema, 'virtualMachineDisk', 'cr', {
'name': 'cr',
'size': 'cr',
'readIops': 'cr',
'writeIops': 'cr',
'opts': 'cr',
'driver': 'cr',
'root': 'cr',
})
def test_compose_service(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'composeService', 'r', {
'name': 'r',
'externalId': 'r',
'environmentId': 'r',
'accountId': 'r',
'data': 'r',
'vip': 'r',
'selectorContainer': 'r',
'healthState': 'r',
'startOnCreate': 'r',
'launchConfig': 'r',
'fqdn': 'r',
'selectorLink': 'r',
'scale': 'r',
'publicEndpoints': 'r',
'scalePolicy': 'r',
'currentScale': 'r',
})
auth_check(user_client.schema, 'composeService', 'r', {
'name': 'r',
'externalId': 'r',
'environmentId': 'r',
'accountId': 'r',
'vip': 'r',
'selectorContainer': 'r',
'healthState': 'r',
'startOnCreate': 'r',
'launchConfig': 'r',
'fqdn': 'r',
'selectorLink': 'r',
'scale': 'r',
'publicEndpoints': 'r',
'scalePolicy': 'r',
'currentScale': 'r',
})
auth_check(project_client.schema, 'composeService', 'rd', {
'name': 'r',
'externalId': 'r',
'environmentId': 'r',
'accountId': 'r',
'vip': 'r',
'selectorContainer': 'r',
'healthState': 'r',
'startOnCreate': 'r',
'launchConfig': 'r',
'fqdn': 'r',
'selectorLink': 'r',
'scale': 'r',
'publicEndpoints': 'r',
'scalePolicy': 'r',
'currentScale': 'r',
})
def test_machine_driver(admin_user_client, user_client, project_client,
service_client):
auth_check(admin_user_client.schema, 'machineDriver', 'crud', {
'name': 'r',
'checksum': 'cru',
'externalId': 'cru',
'uiUrl': 'cru',
'data': 'r',
'url': 'cru',
'defaultActive': 'r',
'builtin': 'cr',
'activateOnCreate': 'cr',
})
auth_check(service_client.schema, 'machineDriver', 'crud', {
'name': 'cru',
'checksum': 'cru',
'externalId': 'cru',
'uiUrl': 'cru',
'data': 'r',
'url': 'cru',
'defaultActive': 'r',
'builtin': 'cr',
'activateOnCreate': 'cr',
})
auth_check(user_client.schema, 'machineDriver', 'r', {
'name': 'r',
'checksum': 'r',
'externalId': 'r',
'uiUrl': 'r',
'url': 'r',
'defaultActive': 'r',
'builtin': 'r',
'activateOnCreate': 'r',
})
auth_check(project_client.schema, 'machineDriver', 'r', {
'name': 'r',
'checksum': 'r',
'externalId': 'r',
'uiUrl': 'r',
'url': 'r',
'defaultActive': 'r',
'builtin': 'r',
'activateOnCreate': 'r',
})
def test_kubernetes_service(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'kubernetesService', 'r', {
'name': 'r',
'externalId': 'r',
'environmentId': 'r',
'accountId': 'r',
'data': 'r',
'vip': 'r',
'selectorContainer': 'r',
'template': 'r',
'healthState': 'r',
})
auth_check(user_client.schema, 'kubernetesService', 'r', {
'name': 'r',
'externalId': 'r',
'environmentId': 'r',
'accountId': 'r',
'vip': 'r',
'selectorContainer': 'r',
'template': 'r',
'healthState': 'r',
})
auth_check(project_client.schema, 'kubernetesService', 'r', {
'name': 'r',
'externalId': 'r',
'environmentId': 'r',
'accountId': 'r',
'vip': 'r',
'selectorContainer': 'r',
'template': 'r',
'healthState': 'r',
})
def test_machine(admin_user_client, user_client, project_client,
service_client):
auth_check(project_client.schema, 'machine', 'crd', {
'accountId': 'r',
'name': 'cr',
'driver': 'r',
'externalId': 'r',
'engineRegistryMirror': 'cr',
'authCertificateAuthority': 'cr',
'authKey': 'cr',
'labels': 'cr',
'engineInstallUrl': 'cr',
'dockerVersion': 'cr',
'engineOpt': 'cr',
'engineInsecureRegistry': 'cr',
'engineLabel': 'cr',
'engineStorageDriver': 'cr',
'engineEnv': 'cr',
})
auth_check(service_client.schema, 'machine', 'crud', {
'accountId': 'r',
'name': 'cr',
'driver': 'r',
'data': 'r',
'extractedConfig': 'ru',
'externalId': 'r',
'engineRegistryMirror': 'cr',
'authCertificateAuthority': 'cr',
'authKey': 'cr',
'labels': 'cru',
'engineInstallUrl': 'cr',
'dockerVersion': 'cr',
'engineOpt': 'cr',
'engineInsecureRegistry': 'cr',
'engineLabel': 'cr',
'engineStorageDriver': 'cr',
'engineEnv': 'cr',
})
auth_check(admin_user_client.schema, 'machine', 'r', {
'accountId': 'r',
'name': 'r',
'data': 'r',
'driver': 'r',
'externalId': 'r',
'engineRegistryMirror': 'r',
'authCertificateAuthority': 'r',
'authKey': 'r',
'labels': 'r',
'engineInstallUrl': 'r',
'dockerVersion': 'r',
'engineOpt': 'r',
'engineInsecureRegistry': 'r',
'engineLabel': 'r',
'engineStorageDriver': 'r',
'engineEnv': 'r',
})
auth_check(user_client.schema, 'machine', 'r', {
'accountId': 'r',
'name': 'r',
'driver': 'r',
'externalId': 'r',
'engineRegistryMirror': 'r',
'authCertificateAuthority': 'r',
'authKey': 'r',
'labels': 'r',
'engineInstallUrl': 'r',
'dockerVersion': 'r',
'engineOpt': 'r',
'engineInsecureRegistry': 'r',
'engineLabel': 'r',
'engineStorageDriver': 'r',
'engineEnv': 'r',
})
def test_snapshot_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'snapshot', 'r', {
'accountId': 'r',
'data': 'r',
'volumeId': 'r',
})
auth_check(user_client.schema, 'snapshot', 'r', {
'accountId': 'r',
'volumeId': 'r',
})
auth_check(project_client.schema, 'snapshot', 'rd', {
'accountId': 'r',
'volumeId': 'r',
})
def test_backup_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'backup', 'r', {
'accountId': 'r',
'data': 'r',
'backupTargetId': 'r',
'snapshotId': 'r',
'uri': 'r',
'volumeId': 'r',
})
auth_check(user_client.schema, 'backup', 'r', {
'accountId': 'r',
'backupTargetId': 'r',
'snapshotId': 'r',
'uri': 'r',
'volumeId': 'r',
})
auth_check(project_client.schema, 'backup', 'rd', {
'accountId': 'r',
'backupTargetId': 'r',
'snapshotId': 'r',
'uri': 'r',
'volumeId': 'r',
})
def test_backup_target_auth(admin_user_client, user_client, project_client):
auth_check(admin_user_client.schema, 'backupTarget', 'r', {
'accountId': 'r',
'data': 'r',
'nfsConfig': 'r',
})
auth_check(user_client.schema, 'backupTarget', 'r', {
'accountId': 'r',
'nfsConfig': 'r',
})
auth_check(project_client.schema, 'backupTarget', 'crd', {
'accountId': 'r',
'nfsConfig': 'cr',
})
| jimengliu/cattle | tests/integration-v1/cattletest/core/test_authorization.py | Python | apache-2.0 | 80,099 |
#!/usr/bin/python
"""
Copyright 2014 Google Inc.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
Expectations on local disk that we can modify.
"""
# System-level imports
import logging
import os
import re
# Must fix up PYTHONPATH before importing from within Skia
import rs_fixpypath # pylint: disable=W0611
# Imports from within Skia
from py.utils import git_utils
import compare_rendered_pictures
import gm_json
import imagepair
import results
FILEPATH_RE = re.compile('.+/' + gm_json.IMAGE_FILENAME_PATTERN)
SKIA_REPO = os.path.abspath(os.path.join(
os.path.dirname(__file__), os.pardir, os.pardir, '.git'))
class WritableExpectations(git_utils.NewGitCheckout):
"""Expectations on local disk that we can modify."""
def __init__(self, set_descriptions):
"""Creates a sandbox on local disk containing writable expectations.
You must use the 'with' statement to create this object in such a way that
it cleans up after itself:
with WritableExpectations(*args) as writable_expectations:
# make modifications
# use the modified results
# the sandbox on local disk is automatically cleaned up here
Args:
set_descriptions: SET_DESCRIPTIONS dict describing the set we want to
update expectations within; this tells us the subdirectory within the
Skia repo where we keep these expectations, and the commithash at
which the user evaluated new baselines.
"""
file_section = set_descriptions[results.KEY__SET_DESCRIPTIONS__SECTION]
assert file_section == gm_json.JSONKEY_EXPECTEDRESULTS
source_dir = _unicode_to_ascii(
set_descriptions[results.KEY__SET_DESCRIPTIONS__DIR])
assert source_dir.startswith(compare_rendered_pictures.REPO_URL_PREFIX)
repo_subdir = source_dir[len(compare_rendered_pictures.REPO_URL_PREFIX):]
repo_revision = _unicode_to_ascii(
set_descriptions[results.KEY__SET_DESCRIPTIONS__REPO_REVISION])
logging.info('Creating a writable Skia checkout at revision "%s"...' %
repo_revision)
super(WritableExpectations, self).__init__(
repository=SKIA_REPO, commit=repo_revision, subdir=repo_subdir)
def modify(self, modifications):
"""Modify the contents of the checkout, using modifications from the UI.
Args:
modifications: data[KEY__LIVE_EDITS__MODIFICATIONS] coming back from the
rebaseline_server UI frontend
"""
logging.info('Reading in dicts from writable Skia checkout in %s ...' %
self.root)
dicts = results.BaseComparisons.read_dicts_from_root(self.root)
# Make sure we have expected-results sections in all our output dicts.
for pathname, adict in dicts.iteritems():
if not adict:
adict = {
# TODO(stephana): These values should be defined as constants
# somewhere, to be kept in sync between this file and
# compare_rendered_pictures.py.
gm_json.JSONKEY_HEADER: {
gm_json.JSONKEY_HEADER_TYPE: 'ChecksummedImages',
gm_json.JSONKEY_HEADER_REVISION: 1,
}
}
if not adict.get(gm_json.JSONKEY_EXPECTEDRESULTS, None):
adict[gm_json.JSONKEY_EXPECTEDRESULTS] = {}
dicts[pathname] = adict
for modification in modifications:
expectations = modification[imagepair.KEY__IMAGEPAIRS__EXPECTATIONS]
_add_image_info_to_expectations(
expectations=expectations,
filepath=modification[imagepair.KEY__IMAGEPAIRS__IMAGE_B_URL])
extra_columns = modification[imagepair.KEY__IMAGEPAIRS__EXTRACOLUMNS]
dictname = modification[imagepair.KEY__IMAGEPAIRS__SOURCE_JSON_FILE]
dict_to_modify = dicts[dictname][gm_json.JSONKEY_EXPECTEDRESULTS]
test_name = extra_columns[compare_rendered_pictures.COLUMN__SOURCE_SKP]
test_record = dict_to_modify.get(test_name, {})
if (extra_columns[compare_rendered_pictures.COLUMN__TILED_OR_WHOLE] ==
compare_rendered_pictures.COLUMN__TILED_OR_WHOLE__TILED):
test_tiles_list = test_record.get(
gm_json.JSONKEY_SOURCE_TILEDIMAGES, [])
tilenum = int(extra_columns[compare_rendered_pictures.COLUMN__TILENUM])
_replace_list_item(test_tiles_list, tilenum, expectations)
test_record[gm_json.JSONKEY_SOURCE_TILEDIMAGES] = test_tiles_list
else:
test_record[gm_json.JSONKEY_SOURCE_WHOLEIMAGE] = expectations
dict_to_modify[test_name] = test_record
# Write the modified files back to disk.
self._write_dicts_to_root(meta_dict=dicts, root=self.root)
def get_diffs(self):
"""Return patchfile describing any modifications to this checkout."""
return self._run_in_git_root(args=[git_utils.GIT, 'diff'])
@staticmethod
def _write_dicts_to_root(meta_dict, root):
"""Write out multiple dictionaries in JSON format.
Args:
meta_dict: a builder-keyed meta-dictionary containing all the JSON
dictionaries we want to write out
root: path to root of directory tree within which to write files
"""
if not os.path.isdir(root):
raise IOError('no directory found at path %s' % root)
for rel_path in meta_dict.keys():
full_path = os.path.join(root, rel_path)
gm_json.WriteToFile(meta_dict[rel_path], full_path)
def _unicode_to_ascii(unicode_string):
"""Returns the plain ASCII form of a unicode string.
TODO(stephana): We created this because we get unicode strings out of the
JSON file, while the git filenames and revision tags are plain ASCII.
There may be a better way to handle this... maybe set the JSON util to just
return ASCII strings?
"""
return unicode_string.encode('ascii', 'ignore')
def _replace_list_item(a_list, index, value):
"""Replaces value at index "index" within a_list.
Args:
a_list: a list
index: index indicating which item in a_list to replace
value: value to set a_list[index] to
If a_list does not contain this index, it will be extended with None entries
to that length.
"""
length = len(a_list)
while index >= length:
a_list.append(None)
length += 1
a_list[index] = value
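# Illustrative example (hypothetical filenames): _replace_list_item pads the
# list with None entries when the index is past the current end, e.g.
#
#   tiles = ['tile0.png', 'tile1.png']
#   _replace_list_item(tiles, 4, 'tile4.png')
#   # tiles is now ['tile0.png', 'tile1.png', None, None, 'tile4.png']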
def _add_image_info_to_expectations(expectations, filepath):
"""Add JSONKEY_IMAGE_* info to an existing expectations dictionary.
TODO(stephana): This assumes that the checksumAlgorithm and checksumValue
can be derived from the filepath, which is currently true but may not always
be true.
Args:
expectations: the expectations dict to augment
filepath: relative path to the image file
"""
(checksum_algorithm, checksum_value) = FILEPATH_RE.match(filepath).groups()
expectations[gm_json.JSONKEY_IMAGE_CHECKSUMALGORITHM] = checksum_algorithm
expectations[gm_json.JSONKEY_IMAGE_CHECKSUMVALUE] = checksum_value
expectations[gm_json.JSONKEY_IMAGE_FILEPATH] = filepath
| mxOBS/deb-pkg_trusty_chromium-browser | third_party/skia/gm/rebaseline_server/writable_expectations.py | Python | bsd-3-clause | 6,889 |
# Copyright The IETF Trust 2007, All Rights Reserved
import datetime, re
from django.conf import settings
from django.contrib.syndication.feeds import Feed, FeedDoesNotExist
from django.utils.feedgenerator import Atom1Feed
from django.core.urlresolvers import reverse as urlreverse
from django.template.defaultfilters import truncatewords_html, date as datefilter, linebreaks
from django.utils.html import strip_tags
from django.utils.text import truncate_words
from ietf.doc.models import *
from ietf.doc.utils import augment_events_with_revision
from ietf.idtracker.templatetags.ietf_filters import format_textarea
class DocumentChanges(Feed):
feed_type = Atom1Feed
def get_object(self, bits):
if len(bits) != 1:
raise Document.DoesNotExist
return Document.objects.get(docalias__name=bits[0])
def title(self, obj):
return "Changes for %s" % obj.display_name()
def link(self, obj):
if obj is None:
raise FeedDoesNotExist
if not hasattr(self, "cached_link"):
self.cached_link = urlreverse("doc_history", kwargs=dict(name=obj.canonical_name()))
return self.cached_link
def subtitle(self, obj):
return "History of change entries for %s." % obj.display_name()
def items(self, obj):
events = obj.docevent_set.all().order_by("-time","-id")
augment_events_with_revision(obj, events)
return events
def item_title(self, item):
return u"[%s] %s [rev. %s]" % (item.by, truncate_words(strip_tags(item.desc), 15), item.rev)
def item_description(self, item):
return truncatewords_html(format_textarea(item.desc), 20)
def item_pubdate(self, item):
return item.time
def item_author_name(self, item):
return unicode(item.by)
def item_link(self, item):
return self.cached_link + "#history-%s" % item.pk
class InLastCall(Feed):
title = "Documents in Last Call"
subtitle = "Announcements for documents in last call."
feed_type = Atom1Feed
author_name = 'IESG Secretary'
link = "/doc/iesg/last-call/"
def items(self):
docs = list(Document.objects.filter(type="draft", states=State.objects.get(type="draft-iesg", slug="lc")))
for d in docs:
d.lc_event = d.latest_event(LastCallDocEvent, type="sent_last_call")
docs = [d for d in docs if d.lc_event]
docs.sort(key=lambda d: d.lc_event.expires)
return docs
def item_title(self, item):
return u"%s (%s - %s)" % (item.name,
datefilter(item.lc_event.time, "F j"),
datefilter(item.lc_event.expires, "F j, Y"))
def item_description(self, item):
return linebreaks(item.lc_event.desc)
def item_pubdate(self, item):
return item.lc_event.time
| mcr/ietfdb | ietf/doc/feeds.py | Python | bsd-3-clause | 2,790 |
#
#
#
from __future__ import absolute_import, division, print_function, \
unicode_literals
from os.path import dirname, isfile, join
from unittest import TestCase
from yaml import safe_load
from yaml.constructor import ConstructorError
from octodns.record import Create
from octodns.provider.yaml import YamlProvider
from octodns.zone import SubzoneRecordException, Zone
from helpers import TemporaryDirectory
class TestYamlProvider(TestCase):
def test_provider(self):
source = YamlProvider('test', join(dirname(__file__), 'config'))
zone = Zone('unit.tests.', [])
# With target we don't add anything
source.populate(zone, target=source)
self.assertEquals(0, len(zone.records))
# without it we see everything
source.populate(zone)
self.assertEquals(15, len(zone.records))
# Assumption here is that a clean round-trip means that everything
# worked as expected, data that went in came back out and could be
# pulled in yet again and still match up. That assumes that the input
        # data completely exercises things. This assumption can be tested
        # relatively well by running
# ./script/coverage tests/test_octodns_provider_yaml.py and
# looking at the coverage file
# ./htmlcov/octodns_provider_yaml_py.html
with TemporaryDirectory() as td:
# Add some subdirs to make sure that it can create them
directory = join(td.dirname, 'sub', 'dir')
yaml_file = join(directory, 'unit.tests.yaml')
target = YamlProvider('test', directory)
# We add everything
plan = target.plan(zone)
self.assertEquals(13, len(filter(lambda c: isinstance(c, Create),
plan.changes)))
self.assertFalse(isfile(yaml_file))
# Now actually do it
self.assertEquals(13, target.apply(plan))
self.assertTrue(isfile(yaml_file))
# There should be no changes after the round trip
reloaded = Zone('unit.tests.', [])
target.populate(reloaded)
self.assertFalse(zone.changes(reloaded, target=source))
# A 2nd sync should still create everything
plan = target.plan(zone)
self.assertEquals(13, len(filter(lambda c: isinstance(c, Create),
plan.changes)))
with open(yaml_file) as fh:
data = safe_load(fh.read())
# these are stored as plural 'values'
for r in data['']:
self.assertTrue('values' in r)
self.assertTrue('values' in data['mx'])
self.assertTrue('values' in data['naptr'])
self.assertTrue('values' in data['_srv._tcp'])
self.assertTrue('values' in data['txt'])
# these are stored as singular 'value'
self.assertTrue('value' in data['aaaa'])
self.assertTrue('value' in data['ptr'])
self.assertTrue('value' in data['spf'])
self.assertTrue('value' in data['www'])
def test_empty(self):
source = YamlProvider('test', join(dirname(__file__), 'config'))
zone = Zone('empty.', [])
# without it we see everything
source.populate(zone)
self.assertEquals(0, len(zone.records))
def test_unsorted(self):
source = YamlProvider('test', join(dirname(__file__), 'config'))
zone = Zone('unordered.', [])
with self.assertRaises(ConstructorError):
source.populate(zone)
source = YamlProvider('test', join(dirname(__file__), 'config'),
enforce_order=False)
# no exception
source.populate(zone)
self.assertEqual(2, len(zone.records))
def test_subzone_handling(self):
source = YamlProvider('test', join(dirname(__file__), 'config'))
# If we add `sub` as a sub-zone we'll reject `www.sub`
zone = Zone('unit.tests.', ['sub'])
with self.assertRaises(SubzoneRecordException) as ctx:
source.populate(zone)
self.assertEquals('Record www.sub.unit.tests. is under a managed '
'subzone', ctx.exception.message)
| h-hwang/octodns | tests/test_octodns_provider_yaml.py | Python | mit | 4,373 |
from django.http import HttpResponse
from .models import Entry
import socket
import datetime
def index(request):
current_hostname = socket.gethostname()
current_host = socket.gethostbyname(current_hostname)
    # Model.save() returns None, so there is nothing useful to keep from it
    Entry(contents=u"%s (%s)" % (current_host, current_hostname), created_on=datetime.datetime.now()).save()
entries = Entry.objects.order_by('-created_on')[:25]
output = '<br/>'.join([str(e.id) + ' ' + e.contents for e in entries])
return HttpResponse(output)
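# Illustrative output (hypothetical IDs, addresses and hostnames): every request
# appends a row for the host that served it, and the view then renders the 25
# most recent entries, newest first, e.g.
#   42 10.1.2.3 (web-2)<br/>41 10.1.2.4 (web-1)<br/>...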
| nineinchnick/django-demo | code/whoami/views.py | Python | mit | 498 |
from bottle import route, request, run
from textblob import TextBlob
from marshmallow import Schema, fields
class BlobSchema(Schema):
polarity = fields.Float()
subjectivity = fields.Float()
chunks = fields.List(fields.String, attribute="noun_phrases")
tags = fields.Raw()
discrete_sentiment = fields.Method("get_discrete_sentiment")
word_count = fields.Function(lambda obj: len(obj.words))
def get_discrete_sentiment(self, obj):
if obj.polarity > 0.1:
return 'positive'
elif obj.polarity < -0.1:
return 'negative'
else:
return 'neutral'
blob_schema = BlobSchema()
@route("/api/v1/analyze", method="POST")
def analyze():
blob = TextBlob(request.json['text'])
result = blob_schema.dump(blob)
return result.data
run(reloader=True, port=5000)
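# Illustrative usage (the numbers below are placeholders; actual output depends
# on the input text and on TextBlob's analyzers):
#
#   $ curl -X POST -H "Content-Type: application/json" \
#          -d '{"text": "I love this library"}' \
#          http://localhost:5000/api/v1/analyze
#
#   {"polarity": 0.5, "subjectivity": 0.6, "chunks": [...], "tags": [...],
#    "discrete_sentiment": "positive", "word_count": 4}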
| mwstobo/marshmallow | examples/textblob_example.py | Python | mit | 845 |
"""Parse command line options and execute it.
Built on top of getopt. optparse can't handle sub-commands.
"""
import getopt
import copy
from collections import OrderedDict
class DefaultUpdate(dict):
"""A dictionary that has an "update_defaults" method where
only items with default values are updated.
    This is used when you have a dict that has multiple sources of values
    (i.e. hardcoded, config file, command line). Values are updated
    beginning from the source with the highest priority.
A default value is added with the method set_default or add_defaults.
"""
def __init__(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
# set of keys that have a non-default value
self._non_default_keys = set()
def set_default(self, key, value):
"""set default value for given key"""
dict.__setitem__(self, key, value)
def add_defaults(self, source):
"""add default values from another dict
@param source: (dict)"""
for key, value in source.items():
if key not in self:
self.set_default(key, value)
def update_defaults(self, update_dict):
"""like dict.update but do not update items that have
a non-default value"""
for key, value in update_dict.items():
if key in self._non_default_keys:
continue
self[key] = value
def __setitem__(self, key, value):
"""overwrite to keep track of _non_default_keys"""
try:
self._non_default_keys.add(key)
# http://bugs.python.org/issue826897
except AttributeError:
self._non_default_keys = set()
self._non_default_keys.add(key)
dict.__setitem__(self, key, value)
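# Illustrative sketch of the behavior described above: a value set from a
# higher-priority source (e.g. the command line) is kept even if a later call
# to update_defaults (e.g. with config-file values) touches the same key.
#
#     opts = DefaultUpdate()
#     opts.set_default('verbosity', 0)           # hardcoded default
#     opts['verbosity'] = 2                      # set from the command line
#     opts.update_defaults({'verbosity': 1})     # config value is ignored
#     assert opts['verbosity'] == 2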
class CmdParseError(Exception):
"""Error parsing options """
class CmdOption(object):
"""a command line option
- name (string) : variable name
- default (value from its type): default value
- type (type): type of the variable. must be able to be initialized
taking a single string parameter.
                    If type is bool, the option is just a flag, and if present
its value is set to True.
- short (string): argument short name
- long (string): argument long name
- inverse (string): argument long name to be the inverse of the default
value (only used by boolean options)
- choices(list - 2-tuple str): sequence of 2-tuple of choice name,
choice description.
- help (string): option description
"""
def __init__(self, opt_dict):
# options must contain 'name' and 'default' value
opt_dict = opt_dict.copy()
for field in ('name', 'default',):
if field not in opt_dict:
msg = "CmdOption dict %r missing required property '%s'"
raise CmdParseError(msg % (opt_dict, field))
self.name = opt_dict.pop('name')
self.type = opt_dict.pop('type', str)
self.set_default(opt_dict.pop('default'))
self.short = opt_dict.pop('short', '')
self.long = opt_dict.pop('long', '')
self.inverse = opt_dict.pop('inverse', '')
self.choices = dict(opt_dict.pop('choices', []))
self.help = opt_dict.pop('help', '')
# TODO add some hint for tab-completion scripts
# options can not contain any unrecognized field
if opt_dict:
msg = "CmdOption dict contains invalid property '%s'"
raise CmdParseError(msg % list(opt_dict.keys()))
def __repr__(self):
tmpl = ("{0}({{'name':{1.name!r}, 'short':{1.short!r}," +
"'long':{1.long!r} }})")
return tmpl.format(self.__class__.__name__, self)
def set_default(self, val):
"""set default value, value is already the expected type"""
if self.type is list:
self.default = copy.copy(val)
else:
self.default = val
    def validate_choice(self, given_value):
        """raise error if value is not a valid choice"""
if given_value not in self.choices:
msg = ("Error parsing parameter '{}'. "
"Provided '{}' but available choices are: {}.")
choices = ("'{}'".format(k) for k in self.choices.keys())
choices_str = ", ".join(choices)
raise CmdParseError(msg.format(self.name, given_value, choices_str))
_boolean_states = {'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False}
def str2boolean(self, str_val):
"""convert string to boolean"""
try:
return self._boolean_states[str_val.lower()]
        except (KeyError, AttributeError):
raise ValueError('Not a boolean: {}'.format(str_val))
def str2type(self, str_val):
"""convert string value to option type value"""
try:
            # no conversion if value is not a string
if not isinstance(str_val, str):
val = str_val
elif self.type is bool:
val = self.str2boolean(str_val)
elif self.type is list:
parts = [p.strip() for p in str_val.split(',')]
val = [p for p in parts if p] # remove empty strings
else:
val = self.type(str_val)
except ValueError as exception:
msg = "Error parsing parameter '{}' {}.\n{}\n"
raise CmdParseError(msg.format(self.name, self.type,
str(exception)))
if self.choices:
self.validate_choice(val)
return val
@staticmethod
def _print_2_columns(col1, col2):
"""print using a 2-columns format """
column1_len = 24
column2_start = 28
left = (col1).ljust(column1_len)
right = col2.replace('\n', '\n'+ column2_start * ' ')
return " %s %s" % (left, right)
def help_param(self):
"""return string of option's short and long name
i.e.: -f ARG, --file=ARG
"""
# TODO replace 'ARG' with metavar (copy from optparse)
opts_str = []
if self.short:
if self.type is bool:
opts_str.append('-%s' % self.short)
else:
opts_str.append('-%s ARG' % self.short)
if self.long:
if self.type is bool:
opts_str.append('--%s' % self.long)
else:
opts_str.append('--%s=ARG' % self.long)
return ', '.join(opts_str)
def help_choices(self):
"""return string with help for option choices"""
if not self.choices:
return ''
# if choice has a description display one choice per line...
if any(self.choices.values()):
items = []
for choice in sorted(self.choices):
items.append("\n{}: {}".format(choice, self.choices[choice]))
return "\nchoices:" + "".join(items)
# ... otherwise display in a single line
else:
return "\nchoices: " + ", ".join(sorted(self.choices.keys()))
def help_doc(self):
"""return list of string of option's help doc"""
        # ignore options that can't be modified on the command line
if not (self.short or self.long):
return []
text = []
opt_str = self.help_param()
# TODO It should always display option's default value
opt_help = self.help % {'default': self.default}
opt_choices = self.help_choices()
text.append(self._print_2_columns(opt_str, opt_help + opt_choices))
# print bool inverse option
if self.inverse:
opt_str = '--%s' % self.inverse
opt_help = 'opposite of --%s' % self.long
text.append(self._print_2_columns(opt_str, opt_help))
return text
class CmdParse(object):
"""Process string with command options
@ivar options: (list - CmdOption)
"""
_type = "Command"
def __init__(self, options):
self._options = OrderedDict((o.name, o) for o in options)
def __contains__(self, key):
return key in self._options
def __getitem__(self, key):
return self._options[key]
@property
def options(self):
"""return list of options for backward compatibility"""
return list(self._options.values())
def get_short(self):
"""return string with short options for getopt"""
short_list = ""
for opt in self._options.values():
if not opt.short:
continue
short_list += opt.short
# ':' means option takes a value
if opt.type is not bool:
short_list += ':'
return short_list
def get_long(self):
"""return list with long options for getopt"""
long_list = []
for opt in self._options.values():
long_name = opt.long
if not long_name:
continue
# '=' means option takes a value
if opt.type is not bool:
long_name += '='
long_list.append(long_name)
if opt.inverse:
long_list.append(opt.inverse)
return long_list
def get_option(self, opt_str):
"""return tuple
- CmdOption from matching opt_str. or None
- (bool) matched inverse
"""
for opt in self._options.values():
if opt_str in ('-' + opt.short, '--' + opt.long):
return opt, False
if opt_str == '--' + opt.inverse:
return opt, True
return None, None
def overwrite_defaults(self, new_defaults):
"""overwrite self.options default values
This values typically come from an INI file
"""
for key, val in new_defaults.items():
if key in self._options:
opt = self._options[key]
opt.set_default(opt.str2type(val))
def parse(self, in_args):
"""parse arguments into options(params) and positional arguments
@param in_args (list - string): typically sys.argv[1:]
@return params, args
params(dict): params contain the actual values from the options.
where the key is the name of the option.
pos_args (list - string): positional arguments
"""
params = DefaultUpdate()
# add default values
for opt in self._options.values():
params.set_default(opt.name, opt.default)
# parse options using getopt
try:
opts, args = getopt.getopt(in_args, self.get_short(),
self.get_long())
except Exception as error:
msg = "Error parsing %s: %s (parsing options: %s)"
raise CmdParseError(msg % (self._type, str(error), in_args))
# update params with values from command line
for opt, val in opts:
this, inverse = self.get_option(opt)
if this.type is bool:
params[this.name] = not inverse
elif this.type is list:
params[this.name].append(val)
else:
params[this.name] = this.str2type(val)
return params, args
class TaskParse(CmdParse):
"""Process string with command options (for tasks)"""
_type = "Task"
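# Minimal usage sketch of CmdOption/CmdParse (illustrative only; the option
# definitions and argument values below are made up):
#
#   options = [CmdOption({'name': 'verbosity', 'default': 0, 'type': int,
#                         'short': 'v', 'long': 'verbosity',
#                         'help': 'verbosity level [default: %(default)s]'}),
#              CmdOption({'name': 'force', 'default': False, 'type': bool,
#                         'long': 'force', 'inverse': 'no-force',
#                         'help': 'force execution'})]
#   cmd = CmdParse(options)
#   params, pos_args = cmd.parse(['-v', '2', '--force', 'taskX'])
#   # params == {'verbosity': 2, 'force': True}, pos_args == ['taskX']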
| agustinhenze/doit.debian | doit/cmdparse.py | Python | mit | 11,568 |
#!/usr/bin/env python3
'''A simple implementation of a sorting algorithm, meant to allow
people to manually rank a list of items using whatever subjective or
objective criteria they want.
This program can be called as a script and used interactively. You
can provide the list of things to sort as command line arguments, or
if there are no arguments provided, you can provide the list in stdin,
one item per line.
Example run:
$ ./sort.py 'ice cream' falafel hamburgers pizza
Which is greater, falafel or ice cream (<, =, or >)? <
Which is greater, hamburgers or ice cream (<, =, or >)? <
Which is greater, hamburgers or falafel (<, =, or >)? >
Which is greater, pizza or hamburgers (<, =, or >)? >
Which is greater, pizza or ice cream (<, =, or >)? <
* ice cream
* pizza
* hamburgers
* falafel
Author: Adam Mesha <adam@mesha.org>
License: MIT
'''
from functools import cmp_to_key
class memoize:
'''We really want to be sure that we don't ask people to compare the
same two items twice, so we cache the result.
'''
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
key = tuple(args)
if key not in self.cache:
self.cache[key] = self.func(*args)
return self.cache[key]
@memoize
def cmpfunc(a, b):
result = None
s = 'Which is greater, {a} or {b} (<, =, or >)? '.format(a=a, b=b)
while result is None or result not in '<=>':
result = input(s).strip()
return '<=>'.index(result) - 1
keyfunc = cmp_to_key(cmpfunc)
def binary_insertion_sort(seq, keyfunc):
'''Insertion sort, using binary search to insert each element. Runs
in O(n**2) time, but the use case is when a human is manually
deciding on the ordering, so the most important thing is to reduce
the number of comparisons.
'''
def mv(srcidx, dstidx):
while srcidx > dstidx:
seq[srcidx], seq[srcidx - 1] = seq[srcidx - 1], seq[srcidx]
srcidx -= 1
i = 1
while i < len(seq):
lower = 0; upper = i
while lower < upper:
j = (upper + lower) // 2
key1, key2 = keyfunc(seq[i]), keyfunc(seq[j])
if key1 == key2:
mv(i, j+1) # XXX this is not stable
i += 1
break
if key1 < key2:
upper = j
else: # >
lower = j + 1
else:
mv(i, upper)
i += 1
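# Illustrative sketch: binary_insertion_sort works with any key function, e.g. a
# plain key instead of the interactive one above (values here are made up):
#
#   items = ['pear', 'fig', 'banana']
#   binary_insertion_sort(items, keyfunc=len)
#   # items is now sorted in place by length: ['fig', 'pear', 'banana']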
class SortableWithHeuristic:
def __init__(self, val, heur):
self.val = val
self.heur = heur
def __str__(self):
return '{val}: {heur}'.format(val=self.val, heur=self.heur)
def __repr__(self):
return '{}(val={}, heur={})'.format(self.__class__.__name__,
repr(self.val),
repr(self.heur))
def get_heuristic_func(val):
result = None
s = 'Give an approximate numeric score to item {}: '.format(val)
while result is None:
try:
result = float(input(s).strip())
except ValueError:
pass
return result
def heuristic_sort(seq, get_heuristic_func, cmpfunc):
def swap(a, b):
seq[a], seq[b] = seq[b], seq[a]
idx = 0
while idx < len(seq):
val = seq[idx]
heur = get_heuristic_func(val)
seq[idx] = SortableWithHeuristic(val, heur)
# find the current location
j = idx
while j > 0 and seq[j].heur < seq[j-1].heur:
swap(j, j-1)
j -= 1
moved = False
while j < idx and cmpfunc(seq[j].val, seq[j+1].val) == 1:
swap(j, j+1)
j += 1
moved = True
if not moved:
while j > 0 and cmpfunc(seq[j].val, seq[j-1].val) == -1:
swap(j, j-1)
j -= 1
if 0 < j < idx:
seq[j].heur = (seq[j-1].heur + seq[j+1].heur) / 2
elif idx > 0:
if j == 0 and seq[j].heur > seq[j+1].heur:
seq[j].heur = seq[j+1].heur - 1
elif j == idx and seq[j].heur < seq[j-1].heur:
seq[j].heur = seq[j-1].heur + 1
idx += 1
def main():
import sys
seq = []
if len(sys.argv) > 1:
seq.extend(sys.argv[1:])
if not seq:
seq.extend(x.strip() for x in sys.stdin.readlines())
heuristic_sort(seq, get_heuristic_func, cmpfunc)
print('\n'.join('* {}'.format(item) for item in reversed(seq)))
if __name__ == '__main__':
main()
| sagittarian/personal-sort | sort.py | Python | mit | 4,539 |
## READ VARIABLE FROM SEVERAL NCFILES and store subset of it as NPY
from __future__ import print_function
path = '/network/aopp/cirrus/pred/kloewer/swm_bf_cntrl/data/'
#path = '/network/aopp/cirrus/pred/kloewer/swm_back_ronew/'
import os; os.chdir(path) # change working directory
import numpy as np
from netCDF4 import Dataset
# OPTIONS
runfolder = [0,6]
s = 40 # read s-th last time step
for r in runfolder:
print(('Store last time step from run %i') % r)
## read data
runpath = path+'run%04i' % r
ncu = Dataset(runpath+'/u.nc')
u = ncu['u'][-s,:,:]
ncu.close()
print('u read.')
np.save(runpath+'/u_last.npy',u)
del u
ncv = Dataset(runpath+'/v.nc')
v = ncv['v'][-s,:,:]
ncv.close()
print('v read.')
np.save(runpath+'/v_last.npy',v)
del v
nceta = Dataset(runpath+'/eta.nc')
eta = nceta['eta'][-s,:,:]
#time = nceta['t'][::sub] # in seconds
#t = time / 3600. / 24. # in days
nceta.close()
print('eta read.')
np.save(runpath+'/eta_last.npy',eta)
del eta
| milankl/swm | calc/process/var_subset_last.py | Python | gpl-3.0 | 1,070 |
from __future__ import absolute_import
from future.utils import PY3
__future_module__ = True
if PY3:
from html.parser import *
else:
from HTMLParser import *
| hughperkins/kgsgo-dataset-preprocessor | thirdparty/future/src/future/moves/html/parser.py | Python | mpl-2.0 | 167 |
# -*- coding: utf-8 -*-
"""
sphinx.builders.gettext
~~~~~~~~~~~~~~~~~~~~~~~
The MessageCatalogBuilder class.
:copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import unicode_literals
from os import path, walk
from codecs import open
from time import time
from datetime import datetime, tzinfo, timedelta
from collections import defaultdict
from uuid import uuid4
from six import iteritems
from sphinx.builders import Builder
from sphinx.util import split_index_msg
from sphinx.util.nodes import extract_messages, traverse_translatable_index
from sphinx.util.osutil import safe_relpath, ensuredir, canon_path
from sphinx.util.i18n import find_catalog
from sphinx.util.console import darkgreen, purple, bold
from sphinx.locale import pairindextypes
POHEADER = r"""
# SOME DESCRIPTIVE TITLE.
# Copyright (C) %(copyright)s
# This file is distributed under the same license as the %(project)s package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: %(project)s %(version)s\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: %(ctime)s\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"""[1:]
class Catalog(object):
"""Catalog of translatable messages."""
def __init__(self):
self.messages = [] # retain insertion order, a la OrderedDict
self.metadata = {} # msgid -> file, line, uid
def add(self, msg, origin):
if not hasattr(origin, 'uid'):
# Nodes that are replicated like todo don't have a uid,
# however i18n is also unnecessary.
return
if msg not in self.metadata: # faster lookup in hash
self.messages.append(msg)
self.metadata[msg] = []
self.metadata[msg].append((origin.source, origin.line, origin.uid))
class MsgOrigin(object):
"""
Origin holder for Catalog message origin.
"""
def __init__(self, source, line):
self.source = source
self.line = line
self.uid = uuid4().hex
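# Illustrative sketch of how Catalog and MsgOrigin work together (made-up values):
#
#   catalog = Catalog()
#   catalog.add('Hello world', MsgOrigin('index.rst', 1))
#   catalog.add('Hello world', MsgOrigin('usage.rst', 10))
#   # catalog.messages == ['Hello world'] (stored once, in insertion order)
#   # catalog.metadata['Hello world'] lists both (source, line, uid) origins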
class I18nBuilder(Builder):
"""
General i18n builder.
"""
name = 'i18n'
versioning_method = 'text'
versioning_compare = None # be set by `gettext_uuid`
def __init__(self, app):
self.versioning_compare = app.env.config.gettext_uuid
super(I18nBuilder, self).__init__(app)
def init(self):
Builder.init(self)
self.catalogs = defaultdict(Catalog)
def get_target_uri(self, docname, typ=None):
return ''
def get_outdated_docs(self):
return self.env.found_docs
def prepare_writing(self, docnames):
return
def compile_catalogs(self, catalogs, message):
return
def write_doc(self, docname, doctree):
catalog = self.catalogs[find_catalog(docname,
self.config.gettext_compact)]
for node, msg in extract_messages(doctree):
catalog.add(msg, node)
if 'index' in self.env.config.gettext_additional_targets:
# Extract translatable messages from index entries.
for node, entries in traverse_translatable_index(doctree):
for typ, msg, tid, main, key_ in entries:
for m in split_index_msg(typ, msg):
if typ == 'pair' and m in pairindextypes.values():
                            # skip built-in translated messages that were already
                            # incorporated in 'sphinx.util.nodes.process_index_entry'
continue
catalog.add(m, node)
# determine tzoffset once to remain unaffected by DST change during build
timestamp = time()
tzdelta = datetime.fromtimestamp(timestamp) - \
datetime.utcfromtimestamp(timestamp)
class LocalTimeZone(tzinfo):
def __init__(self, *args, **kw):
super(LocalTimeZone, self).__init__(*args, **kw)
self.tzdelta = tzdelta
def utcoffset(self, dt):
return self.tzdelta
def dst(self, dt):
return timedelta(0)
ltz = LocalTimeZone()
class MessageCatalogBuilder(I18nBuilder):
"""
Builds gettext-style message catalogs (.pot files).
"""
name = 'gettext'
def init(self):
I18nBuilder.init(self)
self.create_template_bridge()
self.templates.init(self)
def _collect_templates(self):
template_files = set()
for template_path in self.config.templates_path:
tmpl_abs_path = path.join(self.app.srcdir, template_path)
for dirpath, dirs, files in walk(tmpl_abs_path):
for fn in files:
if fn.endswith('.html'):
filename = canon_path(path.join(dirpath, fn))
template_files.add(filename)
return template_files
def _extract_from_template(self):
files = self._collect_templates()
self.info(bold('building [%s]: ' % self.name), nonl=1)
self.info('targets for %d template files' % len(files))
extract_translations = self.templates.environment.extract_translations
for template in self.app.status_iterator(
files, 'reading templates... ', purple, len(files)):
with open(template, 'r', encoding='utf-8') as f:
context = f.read()
for line, meth, msg in extract_translations(context):
origin = MsgOrigin(template, line)
self.catalogs['sphinx'].add(msg, origin)
def build(self, docnames, summary=None, method='update'):
self._extract_from_template()
I18nBuilder.build(self, docnames, summary, method)
def finish(self):
I18nBuilder.finish(self)
data = dict(
version = self.config.version,
copyright = self.config.copyright,
project = self.config.project,
ctime = datetime.fromtimestamp(
timestamp, ltz).strftime('%Y-%m-%d %H:%M%z'),
)
for textdomain, catalog in self.app.status_iterator(
iteritems(self.catalogs), "writing message catalogs... ",
darkgreen, len(self.catalogs),
lambda textdomain__: textdomain__[0]):
# noop if config.gettext_compact is set
ensuredir(path.join(self.outdir, path.dirname(textdomain)))
pofn = path.join(self.outdir, textdomain + '.pot')
pofile = open(pofn, 'w', encoding='utf-8')
try:
pofile.write(POHEADER % data)
for message in catalog.messages:
positions = catalog.metadata[message]
if self.config.gettext_location:
# generate "#: file1:line1\n#: file2:line2 ..."
pofile.write("#: %s\n" % "\n#: ".join(
"%s:%s" % (canon_path(
safe_relpath(source, self.outdir)), line)
for source, line, _ in positions))
if self.config.gettext_uuid:
# generate "# uuid1\n# uuid2\n ..."
pofile.write("# %s\n" % "\n# ".join(
uid for _, _, uid in positions))
# message contains *one* line of text ready for translation
message = message.replace('\\', r'\\'). \
replace('"', r'\"'). \
replace('\n', '\\n"\n"')
pofile.write('msgid "%s"\nmsgstr ""\n\n' % message)
finally:
pofile.close()
| fzheng/codejam | lib/python2.7/site-packages/sphinx/builders/gettext.py | Python | mit | 7,836 |
#******************************************************************************#
# User class:
class user:
    def __init__(self, sumer=0., car=0, ident=None, rate=None, time=None):
        # None defaults avoid sharing one mutable list between all instances.
        self.sum = sumer                                # Sum of the rates for all the movies.
        self.car = car                                  # Number of movies.
        self.id = ident if ident is not None else []    # Identifier for each movie.
        self.rate = rate if rate is not None else []    # Score for each movie.
        self.time = time if time is not None else []    # Time of the score: seconds since the Unix epoch (UTC, January 1, 1970).
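# Hypothetical usage (field values below are made up for illustration):
#
#   u = user()
#   u.id.append(31)            # movie identifier
#   u.rate.append(4.0)         # rating given to that movie
#   u.time.append(1374620400)  # timestamp of the rating
#   u.sum += 4.0
#   u.car += 1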
| DanielDagnino/Machine_Learning-Collaborative_Filter | src/ml_class.py | Python | gpl-3.0 | 466 |
import random
class RegressionTree(object):
"""Wrapper for a whole regression tree."""
def __init__(self, table, target, attr_frac=.75, debug=False,
exclude=[], min_count=10, min_gain=None, split_sampling=100):
"""Build a new regression tree.
table -- complete training table
target -- attribute to learn
attr_frac -- fraction of attributes to use for splitting
debug -- turn on/off debug messages and tests
exclude -- list of attributes to exclude from learning
min_count -- threshold for leaf size
min_gain -- minimum gain in variance for splitting
split_sampling -- number of values to sample when considering
a new split on an attribute
"""
self.target = target
self.attr_frac = attr_frac
self.debug = debug
self.min_count = min_count
self.min_gain = min_gain
self.split_sampling = split_sampling
self.attrs = [attr for attr in table.get_attrs()
if attr != target and attr not in exclude]
self.nb_split_attr = int(attr_frac * len(self.attrs))
self.root = RegressionNode(table, self)
#
# PREDICTION
#
def predict(self, inst):
"""Regress a new instance"""
return self.root.predict(inst)
def neigh_predict(self, inst, attr, min_val, max_val):
"""For attribute `attr`, predict the regressand value as well
as its two closest split values and the regressand values beyond
(but close to) these bounds.
inst -- instance to lookup
attr -- attribute whose neighborhood we explore
min_val -- attribute's minimum value on the training set
max_val -- attribute's maximum value on the training set
"""
class AttrNeighborhood(object):
def __init__(self, left_bound, right_bound):
self.left_bound = left_bound
self.right_bound = right_bound
self.left_reg = None
self.cur_reg = None
self.right_reg = None
def __str__(self):
return "%s |%.3f| %s |%.3f| %s" \
% (str(self.left_reg), self.left_bound,
str(self.cur_reg), self.right_bound,
str(self.right_reg))
neigh = AttrNeighborhood(min_val, max_val)
self.root.neigh_predict(inst, attr, neigh)
return neigh
#
# DUMPING
#
def dump_to_graph(self, g):
"""Dump to a (GraphViz) representation."""
return self.root.dump_to_graph(g)
def __str__(self):
"""Give a (multi-line) string representation."""
return self.root.__str__()
class RegressionNode(object):
def __init__(self, table, tree):
"""Grow a new node in a given regression tree.
table -- LocalTable/RemoteTable to train from
tree -- tree to grow in
"""
self.table = table
self.tree = tree
self.variance = table.variance(tree.target)
if table.count() < tree.min_count or self.variance == 0.:
return self.become_leaf()
best_split = self.try_split()
gain = 1 - best_split.exp_var / self.variance
if gain <= tree.min_gain:
return self.become_leaf()
if tree.debug:
assert issubclass(table, TableInterface)
print " --"
print " | count: %d" % table.count()
print " | variance: %.3e" % self.variance
print " | variance gain: %.1f%%" % (100. * gain)
self.split = best_split
self.left_branch = RegressionNode(best_split.left_table, tree)
self.right_branch = RegressionNode(best_split.right_table, tree)
def become_leaf(self):
self.split = None
self.left_branch = None
self.right_branch = None
self.leaf_value = self.table.mean(self.tree.target)
if self.tree.debug:
print " --"
print " | becoming a leaf"
print " | count: %d" % self.table.count()
def is_leaf(self):
return self.left_branch == None or self.right_branch == None
#
# SPLITTING
#
class AttrSplit:
"""Computes an attribute split and stores the resulting tables."""
def __init__(self, node, attr, split_val):
table = node.table
left_table, right_table, null_table = table.split(attr, split_val)
if null_table.count() > 0:
raise NotImplementedError, "No unknown attributes for now"
self.attr = attr
self.val = split_val
self.left_table = left_table
self.right_table = right_table
self.null_table = null_table
if left_table.count() == 0 or right_table.count() == 0:
self.exp_var = table.variance(node.tree.target)
else:
q = float(left_table.count()) / table.count()
left_var = left_table.variance(node.tree.target)
right_var = right_table.variance(node.tree.target)
self.exp_var = q * left_var + (1 - q) * right_var
def try_split(self):
# don't use all attributes
# e.g. to avoid cross-correlation in Random Forests
split_attrs = random.sample(self.tree.attrs, self.tree.nb_split_attr)
sampling = self.tree.split_sampling
best_split = None
for attr in split_attrs:
for split_val in self.table.sample_attr(attr, sampling):
split = self.AttrSplit(self, attr, split_val)
if not best_split or split.exp_var < best_split.exp_var:
best_split = split
if self.tree.debug:
assert best_split, "Found no split in %s" % split_attrs
return best_split
#
# PREDICTIONS
#
def predict(self, inst):
"""Predict the regressand value for instance `inst`."""
# TODO: unknown attributes
if self.is_leaf():
return self.leaf_value
elif inst[self.split.attr] <= self.split.val:
return self.left_branch.predict(inst)
else:
return self.right_branch.predict(inst)
def neigh_predict(self, inst, attr, neigh):
"""Internal, side-effect part of RegressionTree.neigh_predict().
inst -- instance to predict from
attr -- the attribute whose neighborhood we are exploring
neigh -- datastructure where neighborhood infos are to be stored
"""
if self.is_leaf():
neigh.cur_reg = self.leaf_value
return neigh
assert (self.split.attr in inst.keys())
if inst[self.split.attr] <= self.split.val:
self.left_branch.neigh_predict(inst, attr, neigh)
if self.split.attr == attr \
and neigh.right_bound > self.split.val:
neigh.right_bound = self.split.val
neigh.right_reg = self.right_branch.predict(inst)
else:
self.right_branch.neigh_predict(inst, attr, neigh)
if self.split.attr == attr \
and neigh.left_bound < self.split.val:
neigh.left_bound = self.split.val
neigh.left_reg = self.left_branch.predict(inst)
#
# DUMPING
#
def __str__(self, depth=0):
s = ' | ' * depth
if self.is_leaf():
s += '%.2f [c=%d, V=%.1e]\n' \
% (self.leaf_value, self.table.count(), self.variance)
else:
s += '%s(%.2f) [c=%d, V=%.1e]\n' \
% (self.split.attr, self.split.val,
self.table.count(), self.variance)
s += "%s%s" \
% (self.left_branch.__str__(depth + 1),
self.right_branch.__str__(depth + 1))
return s
def dump_to_graph(self, g):
count = self.table.count()
var = self.variance
sub_label = 'count = %d\\nVar = %.2e' % (count, var)
if self.is_leaf():
label = '%.2f\\n%s' % (self.leaf_value, sub_label)
g.add_node(self.table.name, label=label, shape='box',
fillcolor='#FFFFBB', style='filled')
else:
attr = self.split.attr.replace('_', ' ').upper()
label = '%s\\n%s' % (attr, sub_label)
g.add_node(self.table.name, label=label, shape='egg',
fillcolor='#BBFFFF', style='filled')
self.left_branch.dump_to_graph(g)
self.right_branch.dump_to_graph(g)
g.add_edge(self.table.name, self.left_branch.table.name,
label='≤ %.2f' % self.split.val)
g.add_edge(self.table.name, self.right_branch.table.name,
label='> %.2f' % self.split.val)
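# Hypothetical usage sketch (assumes a table object implementing the interface
# used above: get_attrs(), count(), mean(), variance(), sample_attr() and split();
# attribute names and values are made up):
#
#   tree = RegressionTree(table, target='price', attr_frac=0.75, min_count=10)
#   predicted = tree.predict({'rooms': 3, 'surface': 72.0})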
| mlskit/astromlskit | RANDOM_TREES/Rforest/RandomTrees/regression.py | Python | gpl-3.0 | 9,032 |
# This file is part of django-ca (https://github.com/mathiasertl/django-ca).
#
# django-ca is free software: you can redistribute it and/or modify it under the terms of the GNU
# General Public License as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# django-ca is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with django-ca. If not,
# see <http://www.gnu.org/licenses/>
"""Test the sign_cert management command."""
import io
import os
import re
import stat
import unittest
from datetime import datetime
from datetime import timedelta
from cryptography import x509
from cryptography.x509.oid import NameOID
from django.core.files.storage import FileSystemStorage
from django.test import TestCase
from freezegun import freeze_time
from .. import ca_settings
from ..extensions import ExtendedKeyUsage
from ..extensions import IssuerAlternativeName
from ..extensions import KeyUsage
from ..extensions import SubjectAlternativeName
from ..extensions import TLSFeature
from ..models import Certificate
from ..models import CertificateAuthority
from ..signals import post_issue_cert
from ..signals import pre_issue_cert
from ..subject import Subject
from ..utils import ca_storage
from .base import certs
from .base import override_settings
from .base import override_tmpcadir
from .base import timestamps
from .base.mixins import TestCaseMixin
@override_settings(CA_MIN_KEY_SIZE=1024, CA_PROFILES={}, CA_DEFAULT_SUBJECT={})
@freeze_time(timestamps["everything_valid"])
class SignCertTestCase(TestCaseMixin, TestCase):
"""Main test class for this command."""
default_ca = "root"
load_cas = "__usable__"
def setUp(self) -> None:
super().setUp()
self.csr_pem = certs["root-cert"]["csr"]["pem"]
@override_tmpcadir()
def test_from_stdin(self) -> None:
"""Test reading CSR from stdin."""
stdin = self.csr_pem.encode()
subject = Subject([("CN", "example.com")])
with self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
stdout, stderr = self.cmd("sign_cert", ca=self.ca, subject=subject, stdin=stdin)
self.assertEqual(stderr, "")
self.assertEqual(pre.call_count, 1)
cert = Certificate.objects.get()
self.assertPostIssueCert(post, cert)
self.assertSignature([self.ca], cert)
self.assertSubject(cert.pub.loaded, subject)
self.assertEqual(stdout, f"Please paste the CSR:\n{cert.pub.pem}")
self.assertEqual(
cert.key_usage,
KeyUsage({"critical": True, "value": ["digitalSignature", "keyAgreement", "keyEncipherment"]}),
)
self.assertEqual(cert.extended_key_usage, ExtendedKeyUsage({"value": ["serverAuth"]}))
self.assertEqual(
cert.subject_alternative_name, SubjectAlternativeName({"value": ["DNS:example.com"]})
)
self.assertIssuer(self.ca, cert)
self.assertAuthorityKeyIdentifier(self.ca, cert)
@override_tmpcadir()
def test_with_bundle(self) -> None:
"""Test outputting the whole certificate bundle."""
stdin = self.csr_pem.encode()
subject = Subject([("CN", "example.com")])
stdout, stderr = self.cmd("sign_cert", bundle=True, ca=self.ca, subject=subject, stdin=stdin)
cert = Certificate.objects.get()
self.assertEqual(stdout, f"Please paste the CSR:\n{cert.bundle_as_pem}")
self.assertEqual(stderr, "")
@override_tmpcadir()
def test_usable_cas(self) -> None:
"""Test signing with all usable CAs."""
for name, ca in self.cas.items():
cname = f"{name}-signed.example.com"
stdin = self.csr_pem.encode()
subject = Subject([("CN", cname)])
with self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
stdout, stderr = self.cmd(
"sign_cert", ca=ca, subject=subject, password=certs[name]["password"], stdin=stdin
)
self.assertEqual(stderr, "")
self.assertEqual(pre.call_count, 1)
cert = Certificate.objects.get(ca=ca, cn=cname)
self.assertPostIssueCert(post, cert)
self.assertSignature(reversed(ca.bundle), cert)
self.assertSubject(cert.pub.loaded, subject)
self.assertEqual(stdout, f"Please paste the CSR:\n{cert.pub.pem}")
self.assertEqual(
cert.key_usage,
KeyUsage(
{"critical": True, "value": ["digitalSignature", "keyAgreement", "keyEncipherment"]}
),
)
self.assertEqual(cert.extended_key_usage, ExtendedKeyUsage({"value": ["serverAuth"]}))
self.assertEqual(
cert.subject_alternative_name, SubjectAlternativeName({"value": [f"DNS:{cname}"]})
)
self.assertIssuer(ca, cert)
self.assertAuthorityKeyIdentifier(ca, cert)
@override_tmpcadir()
def test_from_file(self) -> None:
"""Test reading CSR from file."""
csr_path = os.path.join(ca_settings.CA_DIR, "test.csr")
with open(csr_path, "w", encoding="ascii") as csr_stream:
csr_stream.write(self.csr_pem)
try:
subject = Subject([("CN", "example.com"), ("emailAddress", "user@example.com")])
with self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
stdout, stderr = self.cmd("sign_cert", ca=self.ca, subject=subject, csr=csr_path)
self.assertEqual(stderr, "")
self.assertEqual(pre.call_count, 1)
cert = Certificate.objects.get()
self.assertPostIssueCert(post, cert)
self.assertSignature([self.ca], cert)
self.assertSubject(cert.pub.loaded, subject)
self.assertEqual(stdout, cert.pub.pem)
self.assertEqual(
cert.key_usage,
KeyUsage(
{"critical": True, "value": ["digitalSignature", "keyAgreement", "keyEncipherment"]}
),
)
self.assertEqual(cert.extended_key_usage, ExtendedKeyUsage({"value": ["serverAuth"]}))
self.assertEqual(
cert.subject_alternative_name, SubjectAlternativeName({"value": ["DNS:example.com"]})
)
finally:
os.remove(csr_path)
@override_tmpcadir()
def test_to_file(self) -> None:
"""Test writing PEM to file."""
out_path = os.path.join(ca_settings.CA_DIR, "test.pem")
stdin = self.csr_pem.encode()
try:
with self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
stdout, stderr = self.cmd(
"sign_cert",
ca=self.ca,
subject=Subject([("CN", "example.com")]),
out=out_path,
stdin=stdin,
)
self.assertEqual(pre.call_count, 1)
cert = Certificate.objects.get()
self.assertPostIssueCert(post, cert)
self.assertSignature([self.ca], cert)
self.assertEqual(stdout, "Please paste the CSR:\n")
self.assertEqual(stderr, "")
self.assertIssuer(self.ca, cert)
self.assertAuthorityKeyIdentifier(self.ca, cert)
with open(out_path, encoding="ascii") as out_stream:
from_file = out_stream.read()
self.assertEqual(cert.pub.pem, from_file)
finally:
if os.path.exists(out_path):
os.remove(out_path)
@override_tmpcadir()
def test_subject_sort(self) -> None:
"""Test that subject is sorted on the command line."""
cname = "subject-sort.example.com"
subject = f"/CN={cname}/C=AT"
stdin = self.csr_pem.encode()
cmdline = [
"sign_cert",
f"--subject={subject}",
f"--ca={self.ca.serial}",
]
with self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
stdout, stderr = self.cmd_e2e(cmdline, stdin=stdin)
self.assertEqual(pre.call_count, 1)
self.assertEqual(stderr, "")
cert = Certificate.objects.get()
self.assertPostIssueCert(post, cert)
self.assertSignature([self.ca], cert)
self.assertEqual(
cert.pub.loaded.subject,
x509.Name(
[
x509.NameAttribute(NameOID.COUNTRY_NAME, "AT"),
x509.NameAttribute(NameOID.COMMON_NAME, cname),
]
),
)
@override_tmpcadir()
    def test_no_dns_cn(self) -> None:
        """Test using a CN that is not a valid DNS name."""
# Use a CommonName that is *not* a valid DNSName. By default, this is added as a subjectAltName, which
# should fail.
stdin = self.csr_pem.encode()
cname = "foo bar"
msg = rf"^{cname}: Could not parse CommonName as subjectAlternativeName\.$"
with self.assertCommandError(msg), self.mockSignal(pre_issue_cert) as pre, self.mockSignal(
post_issue_cert
) as post:
self.cmd("sign_cert", ca=self.ca, subject=Subject([("CN", cname)]), cn_in_san=True, stdin=stdin)
self.assertFalse(pre.called)
self.assertFalse(post.called)
@override_tmpcadir()
def test_cn_not_in_san(self) -> None:
"""Test adding a CN that is not in the SAN."""
stdin = self.csr_pem.encode()
with self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
stdout, stderr = self.cmd(
"sign_cert",
ca=self.ca,
subject=Subject([("CN", "example.net")]),
cn_in_san=False,
alt=SubjectAlternativeName({"value": ["example.com"]}),
stdin=stdin,
)
self.assertEqual(pre.call_count, 1)
cert = Certificate.objects.get()
self.assertPostIssueCert(post, cert)
self.assertSignature([self.ca], cert)
self.assertIssuer(self.ca, cert)
self.assertAuthorityKeyIdentifier(self.ca, cert)
self.assertSubject(cert.pub.loaded, [("CN", "example.net")])
self.assertEqual(stdout, f"Please paste the CSR:\n{cert.pub.pem}")
self.assertEqual(stderr, "")
self.assertEqual(
cert.subject_alternative_name, SubjectAlternativeName({"value": ["DNS:example.com"]})
)
@override_tmpcadir()
def test_no_san(self) -> None:
"""Test signing without passing any SANs."""
stdin = self.csr_pem.encode()
subject = Subject([("CN", "example.net")])
with self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
stdout, stderr = self.cmd(
"sign_cert",
ca=self.ca,
subject=subject,
cn_in_san=False,
alt=SubjectAlternativeName(),
stdin=stdin,
)
self.assertEqual(pre.call_count, 1)
cert = Certificate.objects.get()
self.assertPostIssueCert(post, cert)
self.assertSignature([self.ca], cert)
self.assertSubject(cert.pub.loaded, subject)
self.assertIssuer(self.ca, cert)
self.assertAuthorityKeyIdentifier(self.ca, cert)
self.assertEqual(stdout, f"Please paste the CSR:\n{cert.pub.pem}")
self.assertEqual(stderr, "")
self.assertIsNone(cert.subject_alternative_name)
@override_tmpcadir(
CA_DEFAULT_SUBJECT=(
("C", "AT"),
("ST", "Vienna"),
("L", "Vienna"),
("O", "MyOrg"),
("OU", "MyOrgUnit"),
("CN", "CommonName"),
("emailAddress", "user@example.com"),
)
)
def test_profile_subject(self) -> None:
"""Test signing with a subject in the profile."""
self.assertEqual(next(t[1] for t in ca_settings.CA_DEFAULT_SUBJECT if t[0] == "O"), "MyOrg")
self.assertEqual(next(t[1] for t in ca_settings.CA_DEFAULT_SUBJECT if t[0] == "OU"), "MyOrgUnit")
# first, we only pass an subjectAltName, meaning that even the CommonName is used.
stdin = self.csr_pem.encode()
with self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
stdout, stderr = self.cmd(
"sign_cert",
ca=self.ca,
cn_in_san=False,
alt=SubjectAlternativeName({"value": ["example.net"]}),
stdin=stdin,
)
self.assertEqual(stderr, "")
self.assertEqual(pre.call_count, 1)
cert = Certificate.objects.get()
self.assertPostIssueCert(post, cert)
self.assertSignature([self.ca], cert)
self.assertSubject(cert.pub.loaded, ca_settings.CA_DEFAULT_SUBJECT)
self.assertIssuer(self.ca, cert)
self.assertAuthorityKeyIdentifier(self.ca, cert)
self.assertEqual(stdout, f"Please paste the CSR:\n{cert.pub.pem}")
# replace subject fields via command-line argument:
subject = Subject(
[
("C", "US"),
("ST", "California"),
("L", "San Francisco"),
("O", "MyOrg2"),
("OU", "MyOrg2Unit2"),
("CN", "CommonName2"),
("emailAddress", "user@example.net"),
]
)
with self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
self.cmd(
"sign_cert",
ca=self.ca,
cn_in_san=False,
alt=SubjectAlternativeName({"value": ["example.net"]}),
stdin=stdin,
subject=subject,
)
self.assertEqual(pre.call_count, 1)
cert = Certificate.objects.get(cn="CommonName2")
self.assertPostIssueCert(post, cert)
self.assertSubject(cert.pub.loaded, subject)
@override_tmpcadir()
def test_extensions(self) -> None:
"""Test setting extensions for the signed certificate."""
self.ca.issuer_alt_name = "DNS:ian.example.com"
self.ca.save()
stdin = self.csr_pem.encode()
cmdline = [
"sign_cert",
f"--subject={Subject([('CN', 'example.com')])}",
f"--ca={self.ca.serial}",
"--key-usage=critical,keyCertSign",
"--ext-key-usage=clientAuth",
"--alt=URI:https://example.net",
"--tls-feature=OCSPMustStaple",
]
with self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
stdout, stderr = self.cmd_e2e(cmdline, stdin=stdin)
self.assertEqual(pre.call_count, 1)
self.assertEqual(stderr, "")
cert = Certificate.objects.get()
self.assertPostIssueCert(post, cert)
self.assertSignature([self.ca], cert)
self.assertSubject(cert.pub.loaded, [("CN", "example.com")])
self.assertEqual(stdout, f"Please paste the CSR:\n{cert.pub.pem}")
self.assertEqual(cert.key_usage, KeyUsage({"critical": True, "value": ["keyCertSign"]}))
self.assertEqual(cert.extended_key_usage, ExtendedKeyUsage({"value": ["clientAuth"]}))
self.assertEqual(
cert.subject_alternative_name,
SubjectAlternativeName({"value": ["URI:https://example.net", "DNS:example.com"]}),
)
self.assertEqual(cert.tls_feature, TLSFeature({"value": ["OCSPMustStaple"]}))
self.assertEqual(
cert.issuer_alternative_name, IssuerAlternativeName({"value": [self.ca.issuer_alt_name]})
)
@override_tmpcadir(CA_DEFAULT_SUBJECT={})
def test_no_subject(self) -> None:
"""Test signing without a subject (but SANs)."""
stdin = self.csr_pem.encode()
with self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
stdout, stderr = self.cmd(
"sign_cert", ca=self.ca, alt=SubjectAlternativeName({"value": ["example.com"]}), stdin=stdin
)
cert = Certificate.objects.get()
self.assertEqual(pre.call_count, 1)
self.assertPostIssueCert(post, cert)
self.assertSignature([self.ca], cert)
self.assertSubject(cert.pub.loaded, [("CN", "example.com")])
self.assertEqual(stdout, f"Please paste the CSR:\n{cert.pub.pem}")
self.assertEqual(stderr, "")
self.assertEqual(
cert.subject_alternative_name, SubjectAlternativeName({"value": ["DNS:example.com"]})
)
@override_tmpcadir(CA_DEFAULT_SUBJECT={})
def test_with_password(self) -> None:
"""Test signing with a CA that is protected with a password."""
password = b"testpassword"
ca = self.cas["pwd"]
self.assertIsNotNone(ca.key(password=password))
ca = CertificateAuthority.objects.get(pk=ca.pk)
# Giving no password raises a CommandError
stdin = self.csr_pem.encode()
with self.assertCommandError(
"^Password was not given but private key is encrypted$"
), self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
self.cmd("sign_cert", ca=ca, alt=SubjectAlternativeName({"value": ["example.com"]}), stdin=stdin)
self.assertEqual(pre.call_count, 0)
self.assertEqual(post.call_count, 0)
# Pass a password
ca = CertificateAuthority.objects.get(pk=ca.pk)
with self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
self.cmd(
"sign_cert",
ca=ca,
alt=SubjectAlternativeName({"value": ["example.com"]}),
stdin=stdin,
password=password,
)
self.assertEqual(pre.call_count, 1)
self.assertEqual(post.call_count, 1)
# Pass the wrong password
ca = CertificateAuthority.objects.get(pk=ca.pk)
with self.assertCommandError(self.re_false_password), self.mockSignal(
pre_issue_cert
) as pre, self.mockSignal(post_issue_cert) as post:
self.cmd(
"sign_cert",
ca=ca,
alt=SubjectAlternativeName({"value": ["example.com"]}),
stdin=stdin,
password=b"wrong",
)
self.assertFalse(pre.called)
self.assertFalse(post.called)
@override_tmpcadir(CA_DEFAULT_SUBJECT={})
@unittest.skipUnless(
isinstance(ca_storage, FileSystemStorage), "Test only makes sense with local filesystem storage."
)
def test_unparseable(self) -> None:
"""Test creating a cert where the CA private key contains bogus data."""
# NOTE: we assert ca_storage class in skipUnless() above
key_path = os.path.join(ca_storage.location, self.ca.private_key_path) # type: ignore[attr-defined]
os.chmod(key_path, stat.S_IWUSR | stat.S_IRUSR)
with open(key_path, "w", encoding="ascii") as stream:
stream.write("bogus")
os.chmod(key_path, stat.S_IRUSR)
# Giving no password raises a CommandError
stdin = io.StringIO(self.csr_pem)
with self.assertCommandError(self.re_false_password), self.mockSignal(
pre_issue_cert
) as pre, self.mockSignal(post_issue_cert) as post:
self.cmd("sign_cert", ca=self.ca, alt=["example.com"], stdin=stdin)
self.assertEqual(pre.call_count, 0)
self.assertEqual(post.call_count, 0)
@override_tmpcadir()
def test_der_csr(self) -> None:
"""Test using a DER CSR."""
csr_path = os.path.join(ca_settings.CA_DIR, "test.csr")
with open(csr_path, "wb") as csr_stream:
csr_stream.write(certs["child-cert"]["csr"]["der"])
try:
subject = Subject([("CN", "example.com"), ("emailAddress", "user@example.com")])
with self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
stdout, stderr = self.cmd("sign_cert", ca=self.ca, subject=subject, csr=csr_path)
self.assertEqual(pre.call_count, 1)
self.assertEqual(stderr, "")
cert = Certificate.objects.get()
self.assertPostIssueCert(post, cert)
self.assertSignature([self.ca], cert)
self.assertSubject(cert.pub.loaded, subject)
self.assertEqual(stdout, cert.pub.pem)
self.assertEqual(
cert.key_usage,
KeyUsage(
{"critical": True, "value": ["digitalSignature", "keyAgreement", "keyEncipherment"]}
),
)
self.assertEqual(cert.extended_key_usage, ExtendedKeyUsage({"value": ["serverAuth"]}))
self.assertEqual(
cert.subject_alternative_name, SubjectAlternativeName({"value": ["DNS:example.com"]})
)
finally:
os.remove(csr_path)
@override_tmpcadir()
def test_expiry_too_late(self) -> None:
"""Test signing with an expiry after the CA expires."""
time_left = (self.ca.expires - datetime.now()).days
expires = timedelta(days=time_left + 3)
stdin = io.StringIO(self.csr_pem)
with self.assertCommandError(
rf"^Certificate would outlive CA, maximum expiry for this CA is {time_left} days\.$"
), self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
self.cmd("sign_cert", ca=self.ca, alt={"value": ["example.com"]}, expires=expires, stdin=stdin)
self.assertFalse(pre.called)
self.assertFalse(post.called)
@override_tmpcadir()
def test_no_cn_or_san(self) -> None:
"""Test signing a cert that has neither CN nor SAN."""
with self.assertCommandError(
r"^Must give at least a CN in --subject or one or more --alt arguments\.$"
), self.mockSignal(pre_issue_cert) as pre, self.mockSignal(post_issue_cert) as post:
self.cmd("sign_cert", ca=self.ca, subject=Subject([("C", "AT")]))
self.assertFalse(pre.called)
self.assertFalse(post.called)
@override_tmpcadir()
@freeze_time(timestamps["everything_valid"])
def test_revoked_ca(self) -> None:
"""Test signing with a revoked CA."""
self.ca.revoke()
stdin = io.StringIO(self.csr_pem)
subject = Subject([("CN", "example.com")])
with self.assertCommandError(r"^Certificate Authority is revoked\.$"), self.mockSignal(
pre_issue_cert
) as pre, self.mockSignal(post_issue_cert) as post:
self.cmd("sign_cert", ca=self.ca, subject=subject, stdin=stdin)
self.assertFalse(pre.called)
self.assertFalse(post.called)
@override_tmpcadir()
@freeze_time(timestamps["everything_valid"])
def test_unusable_ca(self) -> None:
"""Test signing with an unusable CA."""
path = ca_storage.path(self.ca.private_key_path)
os.remove(path)
msg = rf"^\[Errno 2\] No such file or directory: '{path}'"
stdin = io.StringIO(self.csr_pem)
subject = Subject([("CN", "example.com")])
with self.assertCommandError(msg), self.mockSignal(pre_issue_cert) as pre, self.mockSignal(
post_issue_cert
) as post:
self.cmd("sign_cert", ca=self.ca, subject=subject, stdin=stdin)
self.assertFalse(pre.called)
self.assertFalse(post.called)
@override_tmpcadir()
@freeze_time(timestamps["everything_expired"])
def test_expired_ca(self) -> None:
"""Test signing with an expired CA."""
stdin = io.StringIO(self.csr_pem)
subject = Subject([("CN", "example.com")])
with self.assertCommandError(r"^Certificate Authority has expired\.$"), self.mockSignal(
pre_issue_cert
) as pre, self.mockSignal(post_issue_cert) as post:
self.cmd("sign_cert", ca=self.ca, subject=subject, stdin=stdin)
self.assertFalse(pre.called)
self.assertFalse(post.called)
@override_tmpcadir()
def test_help_text(self) -> None:
"""Test the help text."""
with self.assertCreateCertSignals(False, False):
help_text = self.cmd_help_text("sign_cert")
# Remove newlines and multiple spaces from text for matching independent of terminal width
help_text = re.sub(r"\s+", " ", help_text.replace("\n", ""))
self.assertIn("Do not add the CommonName as subjectAlternativeName.", help_text)
self.assertIn("Add the CommonName as subjectAlternativeName (default).", help_text)
with self.assertCreateCertSignals(False, False), self.settings(
CA_PROFILES={"webserver": {"cn_in_san": False}}
):
help_text = self.cmd_help_text("sign_cert")
help_text = re.sub(r"\s+", " ", help_text.replace("\n", ""))
self.assertIn("Do not add the CommonName as subjectAlternativeName (default).", help_text)
self.assertIn("Add the CommonName as subjectAlternativeName.", help_text)
@override_settings(USE_TZ=True)
class SignCertWithTZTestCase(SignCertTestCase):
"""Same but with timezone support."""
| mathiasertl/django-ca | ca/django_ca/tests/tests_command_sign_cert.py | Python | gpl-3.0 | 25,708 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import ctypes
import sys
from . import api
from . import compat
class LazyResult(object):
"""
Lazy command execution result wrapper.
    This wrapper implements an iterator interface.
"""
_return_code = None
_consumed = False
def __init__(self, session, command):
self.session = session
self.command = command
def __next__(self):
if self._finished:
raise StopIteration()
data = ctypes.create_string_buffer(10)
        read_bytes = api.library.ssh_channel_read(self.channel, ctypes.byref(data),
                                                  len(data), 0)
        if read_bytes > 0:
            return data.value
        api.library.ssh_channel_send_eof(self.channel)
self._return_code = api.library.ssh_channel_get_exit_status(self.channel)
api.library.ssh_channel_free(self.channel)
self.channel = None
self._finished = True
raise StopIteration
if sys.version_info[0] == 2:
next = __next__
def __iter__(self):
if self._consumed:
            raise RuntimeError("Result is already consumed")
self._consumed = True
self._finished = False
        self.channel = api.library.ssh_channel_new(self.session)
# Open ssh session
ret = api.library.ssh_channel_open_session(self.channel)
if ret != api.SSH_OK:
raise RuntimeError("Error code: {0}".format(ret))
# Execute the command
ret = api.library.ssh_channel_request_exec(self.channel, self.command)
if ret != api.SSH_OK:
msg = api.library.ssh_get_error(self.session)
raise RuntimeError("Error {0}: {1}".format(ret, msg.decode('utf-8')))
return self
def as_bytes(self):
"""
Launch the command and return a result as bytes.
:returns: bytes chunk of command execution result
:rtype: bytes
"""
return b"".join([x for x in self])
def as_str(self):
"""
Launch the command and return a result as unicode string
:returns: unicode chunk of command execution result
:rtype: str/unicode
"""
return self.as_bytes().decode("utf-8")
def wait(self):
"""
Waits a complete command execution and returns the return code
:returns: execution result return code
:rtype: int
"""
list(self)
return self.return_code
@property
def return_code(self):
return self._return_code
class Result(LazyResult):
"""
Consumed version of LazyResult. Useful for simple command
execution.
"""
_data = None
def __init__(self, *args, **kwargs):
super(Result, self).__init__(*args, **kwargs)
# consume iterator and save state
self._data = list(self)
def as_bytes(self):
"""
Return a cached result.
:returns: bytes chunk of command execution result
:rtype: bytes
"""
return b"".join(self._data)
def wait(self):
return self.return_code
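# Hypothetical usage sketch (assumes a `session` handle obtained elsewhere with
# this package; names outside this module are illustrative only):
#
#   result = LazyResult(session, b"uname -a")
#   for chunk in result:          # chunks are bytes read from the channel
#       print(chunk)
#   print(result.return_code)     # available once the iterator is exhausted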
| niwinz/pyssh-ctypes | pyssh/result.py | Python | bsd-3-clause | 3,169 |
#!/usr/bin/python
import sys, os
import subprocess
# Get the current folder
folder = os.path.abspath(__file__)
folder = os.path.dirname(folder)
# Change to static folder
folder = os.path.dirname(folder)
# Change to app folder
folder = os.path.dirname(folder)
# Get app name
app_name = folder.split(os.sep)[-1]
# Change to applications folder
folder = os.path.dirname(folder)
# Change to w2py root folder
folder = os.path.dirname(folder)
# Set the system to that folder
os.chdir(folder)
# Change to the web2py folder
print "App: " + app_name
print "W2PyFolder: " + os.getcwd()
group_name = "wamap_delete"
print "Scheduler Group: " + group_name
pid = "0"
try:
f = open(app_name + ".scheduler." + group_name + ".pid", 'r+')
pid = f.read()
f.close()
except IOError:
pid = "0"
pid = pid.strip()
if (pid == ""):
pid = "0"
print "Last PID: " + str(pid)
# See if web2py scheduler is running
cmd1 = ["/bin/ps ax | grep 'web2py' | awk '{print $1;}'"]
p1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE, shell=True)
out = p1.communicate()[0]
running = False
for line in out.split(os.linesep):
if (pid == line.strip()):
running = True
print "PS List: " + out
s = open(app_name + '.scheduler.' + group_name + '.log', 'a')
if (running == True):
# Process is running?
print "PS IS RUNNING"
s.write("PS IS RUNNING\n")
else:
print "PS NOT RUNNING"
s.write("PS NOT RUNNING\n")
# Start the scheduler app
#cmd = ["/usr/bin/nohup /usr/bin/python web2py.py -K " + app_name + " > /dev/null 2>&1 &"]
cmd = ["/usr/bin/nohup", "/usr/bin/python", "web2py.py", "-K", "'" + app_name + ":" + group_name + "'"] #, "&"] # "> /dev/null 2>&1 &"]
print "RUN APP: " + str(cmd)
#p = subprocess.Popen(cmd, shell=True, close_fds=True) #, creationflags=0x00000008)
p = subprocess.Popen(cmd, close_fds=True) #, creationflags=0x00000008)
f = open(app_name + '.scheduler.' + group_name + '.pid', 'w')
f.write(str(p.pid))
f.close()
# Should run and block until done
#print p.communicate()[0]
#p.wait()
s.close()
sys.exit(0)
| frankyrumple/smc | static/scheduler/start_wamap_delete_scheduler.py | Python | mit | 2,018 |
"""Predicates regarding the state of the challenge."""
import datetime
from apps.managers.challenge_mgr import challenge_mgr
from apps.managers.challenge_mgr.models import GameInfo
def game_enabled(user, game_name):
"""Returns True if the game is enabled."""
_ = user
return GameInfo.objects.filter(name=game_name, enabled=True).count()
def reached_round(user, round_name):
"""Returns True if the current time was past the start of specified round."""
_ = user
info = challenge_mgr.get_round_info(round_name)
today = datetime.datetime.today()
return info and today >= info["start"]
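# Illustrative sketch (game and round names are made up; they must match rows in
# GameInfo and the configured challenge rounds):
#
#   if game_enabled(user, "Energy Game") and reached_round(user, "Round 1"):
#       pass  # e.g. unlock the corresponding widget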
| yongwen/makahiki | makahiki/apps/managers/predicate_mgr/challenge_predicates.py | Python | mit | 619 |
#!/usr/bin/env python
import rospy
from std_msgs.msg import String
import PyCmdMessenger
import threading
# Initialize an ArduinoBoard instance. This is where you specify baud rate and
# serial timeout. If you are using a non ATmega328 board, you might also need
# to set the data sizes (bytes for integers, longs, floats, and doubles).
arduino = PyCmdMessenger.ArduinoBoard("/dev/ttyACM0", baud_rate=115200)
commands = [
["cmd_steer", "i"],
["cmd_throttle", "i"],
["cmd_rpm", "i"],
["cmd_sonar", "ii"],
["cmd_toggle_ebrake", "?"],
["cmd_govern_forward", "i"],
["cmd_govern_reverse", "i"],
["cmd_set_mode", "?"],
["cmd_set_steer_bias", "i"],
["cmd_info", "s"]
]
# Initialize the messenger
commander = PyCmdMessenger.CmdMessenger(arduino, commands)
print("--------PLAYBACK-----------")
def callback(data):
print('PLAYBACK RCVD:', data)
rospy.loginfo(rospy.get_caller_id() + '%s', data.data)
m = ''.join(data.data.split(" ")[1:])
rospy.loginfo(rospy.get_caller_id() + '%s', m)
if(m.startswith("V79-T")):
throttle_pos = int(m.split(":")[1])
commander.send("cmd_throttle", throttle_pos)
if(m.startswith("V79-S")):
steer_angle = int(m.split(":")[1])
commander.send("cmd_steer", steer_angle)
def read_from_pi(_commander):
while True:
print(_commander.receive())
def bus_playback():
# In ROS, nodes are uniquely named. If two nodes with the same
# name are launched, the previous one is kicked off. The
# anonymous=True flag means that rospy will choose a unique
# name for our 'listener' node so that multiple listeners can
# run simultaneously.
rospy.init_node('bus_playback')
rospy.loginfo(rospy.get_caller_id() + '%s', "bus playback started")
rospy.Subscriber('bus_comm', String, callback)
listener_thread = threading.Thread(target = read_from_pi, args=[commander])
print "Starting listener"
listener_thread.start()
# spin() simply keeps python from exiting until this node is stopped
rospy.spin()
if __name__ == '__main__':
bus_playback()
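# The callback above assumes messages on the 'bus_comm' topic look roughly like
# "<timestamp> V79-T:<throttle>" or "<timestamp> V79-S:<angle>"; the exact format
# is defined by the publisher (this is an assumption inferred from the parsing).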
| DrClick/ARCRacing | ros_system_ws/src/vector79/scripts/bus_playback.py | Python | mit | 2,126 |
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAdminUser',),
'PAGE_SIZE': 50,
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'DEFAULT_FILTER_BACKENDS': ['django_filters.rest_framework.DjangoFilterBackend'],
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.TokenAuthentication',
'rest_framework.authentication.SessionAuthentication', # Useful for unit tests
)
}
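# Note: TokenAuthentication additionally requires 'rest_framework.authtoken' in
# INSTALLED_APPS (assumed to be configured elsewhere in this settings package).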
| astrobin/astrobin | astrobin/settings/components/rest.py | Python | agpl-3.0 | 484 |
#!/usr/bin/env python3
import os, json, re
JSON_LINES_FILE = os.path.join(os.path.dirname(os.path.realpath(__file__)), "haiku_lines.json")
PUNCTUATION = r"[`~@#$%_\\'+\-/]" # punctuation that is a part of text
STANDALONE = r"(?:[!.,;()^&\[\]{}|*=<>?]|[dDpP][:8]|:\S)" # standalone characters or emoticons that wouldn't otherwise be captured
WORD_PATTERN = STANDALONE + r"\S*|https?://\S+|(?:\w|" + PUNCTUATION + r")+" # token pattern
WORD_MATCHER = re.compile(WORD_PATTERN, re.IGNORECASE)
def tokenize_text(text):
return (m.lower() for m in WORD_MATCHER.findall(text))
def load_word_syllable_counts():
word_syllable_counts = {}
hyphenation_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), "mhyph.txt")
with open(hyphenation_file, "rb") as f:
for line in f.readlines():
try: word = line.rstrip(b"\r\n").replace(b"\xA5", b"").decode("UTF-8")
except UnicodeDecodeError: continue
syllables = 1 + line.count(b"\xA5") + line.count(b" ") + line.count(b"-")
word_syllable_counts[word] = syllables
return word_syllable_counts
CHAT_HISTORY_DIRECTORY = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "..", "..", "@history")
def get_metadata():
with open(os.path.join(CHAT_HISTORY_DIRECTORY, "metadata", "users.json"), "r") as f:
entries = json.load(f)
user_names = {entry["id"]: entry["name"] for entry in entries}
user_real_names = {entry["id"]: entry["profile"]["real_name"] for entry in entries}
with open(os.path.join(CHAT_HISTORY_DIRECTORY, "metadata", "channels.json"), "r") as f:
entries = json.load(f)
channel_names = {entry["id"]: entry["name"] for entry in entries}
return user_names, user_real_names, channel_names
USER_NAMES_BY_ID, USER_REAL_NAMES_BY_ID, CHANNEL_NAMES_BY_ID = get_metadata()
def server_text_to_text(server_text):
"""Returns `server_text`, a string in Slack server message format, converted into a plain text string. The transformation can lose some information for escape sequences, such as link labels."""
assert isinstance(server_text, str), "`server_text` must be a string rather than \"{}\"".format(server_text)
text_without_special_sequences = re.sub(r"<[^<>]*>", "", server_text)
assert "<" not in text_without_special_sequences and ">" not in text_without_special_sequences, "Invalid special sequence in server text \"{}\", perhaps some text needs to be escaped"
# process link references
def process_special_sequence(match):
original, body = match.group(0), match.group(1).split("|")[0]
if body.startswith("#"): # channel reference
return "#" + CHANNEL_NAMES_BY_ID[body[1:]] if body[1:] in CHANNEL_NAMES_BY_ID else original
if body.startswith("@"): # user reference
return "@" + USER_NAMES_BY_ID[body[1:]] if body[1:] in USER_NAMES_BY_ID else original
if body.startswith("!"): # special command
if body == "!channel": return "@channel"
if body == "!group": return "@group"
if body == "!everyone": return "@everyone"
return body # link, should remove angle brackets and label in order to allow it to linkify
raw_text = re.sub(r"<(.*?)>", process_special_sequence, server_text)
    return raw_text.replace("&lt;", "<").replace("&gt;", ">").replace("&amp;", "&")
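# Hypothetical example (the special command is real Slack syntax, the sentence
# is made up): special sequences are expanded and HTML entities are unescaped,
# e.g. server_text_to_text("hi <!channel>, 1 &lt; 2") -> "hi @channel, 1 < 2".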
def get_message_text(message):
"""Returns the text value of `message` if it is a valid text message, or `None` otherwise"""
if message.get("type") == "message" and isinstance(message.get("ts"), str):
if isinstance(message.get("text"), str) and isinstance(message.get("user"), str): # normal message
return server_text_to_text(message["text"])
if message.get("subtype") == "message_changed" and isinstance(message.get("message"), dict) and isinstance(message["message"].get("user"), str) and isinstance(message["message"].get("text"), str): # edited message
return server_text_to_text(message["message"]["text"])
return None
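# Minimal illustration with a hypothetical message dict: a plain text message
# yields its converted text, anything malformed yields None, e.g.
#     get_message_text({"type": "message", "ts": "1.0", "user": "U1", "text": "hi"})  ->  "hi"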
def get_history_files():
"""Returns a mapping from channel IDs to absolute file paths of their history entries"""
for dirpath, _, filenames in os.walk(CHAT_HISTORY_DIRECTORY):
result = {}
for history_file in filenames:
channel_id, extension = os.path.splitext(os.path.basename(history_file))
if extension != ".json": continue
result[channel_id] = os.path.join(dirpath, history_file)
return result
return {}
# obtain mapping from words to the number of syllables in those words
word_syllable_counts = load_word_syllable_counts()
# find messages with 5 syllables and 7 syllables
five_syllable_messages = []
seven_syllable_messages = []
for channel_id, history_file in get_history_files().items():
with open(history_file, "r") as f:
for entry in f:
text = get_message_text(json.loads(entry))
if text is None: continue
# count syllables in the text
syllables = 0
for token in tokenize_text(text):
if token in word_syllable_counts:
syllables += word_syllable_counts[token]
else: # unknown word, ignore the whole message
break
else:
if syllables == 5:
five_syllable_messages.append(text)
elif syllables == 7:
seven_syllable_messages.append(text)
# store result
result = {"five_syllables": five_syllable_messages, "seven_syllables": seven_syllable_messages}
with open(JSON_LINES_FILE, "w") as f:
json.dump(result, f)
| Uberi/botty-bot-bot-bot | src/plugins/haiku/generate_haiku_lines.py | Python | mit | 5,681 |
# An example script to show how to output a sine wave using a DAC.
# Because we have to do it all in software, there are limitations on how fast
# we can update the DAC. Update intervals faster than 5 ms may give weird
# results because of the large percentage of missed updates.
#
# Note: This example uses signal.setitimer() and signal.alarm(), and therefore
# requires Python 2.6 on Unix to run. See:
# http://docs.python.org/library/signal.html#signal.setitimer
# http://docs.python.org/library/signal.html#signal.alarm
#
# When changing the update interval and frequency, consider how your values
# effect the waveform. A slow update interval coupled with a fast frequency
# can result in strange behavior. Try to keep the period (1/frequency) much
# greater than update interval.
# Constants. Change these to change the results:
# Controls how fast the DAC will be updated, in seconds.
UPDATE_INTERVAL = 0.005
# The frequency of the sine wave, in Hz
FREQUENCY = 10
# Imports:
import u3, u6, ue9 # For working with the U3
import signal # For timing
import math # For sin function
from datetime import datetime # For printing times
if __name__ == '__main__':
print "This program will attempt to generate a sine wave with a frequency of %s Hz, updating once every %s seconds." % (FREQUENCY, UPDATE_INTERVAL)
print "Opening LabJack...",
# Open up our LabJack
d = u3.U3()
#d = u6.U6()
#d = ue9.UE9()
print "Done"
# Make a class to keep track of variables and the like
class DacSetter(object):
def __init__(self, frequency, updateInterval):
self.count = 0
self.dac = 0
self.setDacCount = 0
self.go = True
# Points between peaks (pbp)
pbp = (float(1)/frequency)/updateInterval
# Figure out how many degrees per update we need to go.
self.step = float(360)/pbp
# Stupid sin function only takes radians... but I think in degrees.
self.degToRad = ( (2*math.pi) / 360 )
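            # Worked example with the module defaults: FREQUENCY = 10 Hz and
            # UPDATE_INTERVAL = 0.005 s give pbp = (1/10)/0.005 = 20 updates
            # per period, so self.step = 360/20 = 18 degrees per update.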
def setDac(self):
# calculate the value to put in the sin
value = (self.setDacCount * self.step) * self.degToRad
# Writes the dac.
self.dac = d.writeRegister(5000, 2.5+2*math.sin(value))
# Count measures how many successful updates occurred.
self.count += 1
# Lower the go flag
self.go = False
def handleSetDac(self, signum, frame):
# This function gets called every UPDATE_INTERVAL seconds.
# Raise the go flag.
self.go = True
# setDacCount measures how many times the timer went off.
self.setDacCount += 1
# Create our DacSetter
dacs = DacSetter(FREQUENCY, UPDATE_INTERVAL)
# Set up the signals
signal.signal(signal.SIGALRM, dacs.handleSetDac)
signal.setitimer(signal.ITIMER_REAL, UPDATE_INTERVAL, UPDATE_INTERVAL)
# Run for ~10 seconds. Expect about 2 extra seconds of overhead.
signalcount = int(10/UPDATE_INTERVAL)
# Print the current time, just to let you know something is happening.
print "Start:", datetime.now()
for i in xrange(signalcount):
# Wait for signal to be received
signal.pause()
# If the dacs flag is set, set the dac.
if dacs.go:
dacs.setDac()
# Print the stop time, in case you wanted to know.
print "Stop:", datetime.now()
# Done with the timer, let's turn it off.
signal.setitimer(signal.ITIMER_REAL, 0)
# Print short summary of the difference between how may updates were
# expected and how many occurred.
print "# of Updates = %s, # of signals = %s" % (dacs.count, dacs.setDacCount)
print "The closer the number of updates is to the number of signals, the better your waveform will be."
| bmazin/SDR | DataReadout/ReadoutControls/lib/LabJackPython-8-26-2011/Examples/outputSinDAC.py | Python | gpl-2.0 | 4,045 |
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes supporting editing of DAO/DTO-managed models."""
__author__ = [
'John Orr (jorr@google.com)',
'Mike Gainer (mgainer@googe.com)'
]
import cgi
import copy
import urllib
from common.crypto import XsrfTokenManager
from controllers import utils
from models import roles
from models import transforms
from modules.oeditor import oeditor
class BaseDatastoreAssetEditor(utils.ApplicationHandler):
def get_form(self, rest_handler, key, exit_url, deletable=True):
"""Build the Jinja template for the editor form."""
rest_url = self.canonicalize_url(rest_handler.URI)
exit_url = self.canonicalize_url(exit_url)
if key and deletable:
delete_url = '%s?%s' % (
self.canonicalize_url(rest_handler.URI),
urllib.urlencode({
'key': key,
'xsrf_token': cgi.escape(
self.create_xsrf_token(rest_handler.XSRF_TOKEN))
}))
else:
delete_url = None
schema = rest_handler.get_schema()
return oeditor.ObjectEditor.get_html_for(
self,
schema.get_json_schema(),
schema.get_schema_dict(),
key, rest_url, exit_url,
delete_url=delete_url, delete_method='delete',
required_modules=rest_handler.REQUIRED_MODULES,
extra_js_files=rest_handler.EXTRA_JS_FILES)
class BaseDatastoreRestHandler(utils.BaseRESTHandler):
"""Basic REST operations for DTO objects.
Provides REST functionality for derived classes based on Entity/DAO/DTO
pattern (see models/models.py). Subclasses are expected to provide
the following:
DAO: Subclasses should have a class-level variable named "DAO".
This should name the DAO type corresponding to the entity
being handled. DAO must have a member "DTO", which names
the DTO type.
XSRF_TOKEN: A short string of the form 'foobar-edit', where foobar
is a short, lowercased version of the name of the entity type.
SCHEMA_VERSIONS: A list of supported version numbers of schemas
of items. The 0th element of the list must be the preferred
version number for newly-created items.
Hook method overrides. Other than the basic 'put', 'delete', and
'get' methods, there are a number of hook functions you may need
    to override. The only mandatory function is 'get_default_content()'.
"""
def sanitize_input_dict(self, json_dict):
"""Give subclasses a hook to clean up incoming data before storage.
Args:
            json_dict: This is the raw dict containing a parse of the JSON
object as returned by the form editor. In particular, it
has not been converted into a DTO yet. Modify the dict
in place to clean up values. (E.g., remove leading/trailing
whitespace, fix up string/int conversions, etc.)
"""
pass
def validate(self, item_dict, key, schema_version, errors):
"""Allow subclasses to do validations that the form cannot.
Args:
item_dict: A Python dict that will be used to populate
the saved version of the item. Modify this in place as
necessary.
key: The key for the item, if available. New items will not
yet have a key when this function is called.
schema_version: This version has already been checked against
the SCHEMA_VERSIONS declared in your class; it is provided
to facilitate dispatch to a version-specific validation
function.
errors: A list of strings. These will be displayed
on the editor page when there is a problem. The save
operation will be prevented if there are any entries in
the errors list.
"""
pass
def pre_save_hook(self, dto):
"""Give subclasses a hook to modify the DTO before saving."""
pass
def after_save_hook(self):
"""Give subclasses a hook to perform an action after saving."""
pass
def is_deletion_allowed(self, dto):
"""Allow subclasses to check referential integrity before delete.
If deletion is not allowed, the subclass should:
- Return False.
- Return an appropriate message to the REST client; the base
class will just return without taking any further action.
Args:
dto: A DTO of the type specified by the subclass' DAO.DTO variable.
Returns:
True: The base class may proceed with deletion.
False: Deletion is prohibited; derived class has emitted a response.
"""
return True
def transform_for_editor_hook(self, item_dict):
"""Allow subclasses to modify dict before it goes to the edit form."""
return item_dict
def transform_after_editor_hook(self, item_dict):
"""Allow subclasses to modify dict returned from editor form."""
return item_dict
def get_default_content(self):
"""Subclass provides default values to initialize editor form."""
raise NotImplementedError('Subclasses must override this function.')
def put(self):
"""Store a DTO in the datastore in response to a PUT."""
request = transforms.loads(self.request.get('request'))
key = request.get('key')
if not self.assert_xsrf_token_or_fail(
request, self.XSRF_TOKEN, {'key': key}):
return
if not roles.Roles.is_course_admin(self.app_context):
transforms.send_json_response(
self, 401, 'Access denied.', {'key': key})
return
payload = request.get('payload')
json_dict = transforms.loads(payload)
self.sanitize_input_dict(json_dict)
errors = []
try:
python_dict = transforms.json_to_dict(
json_dict, self.get_schema().get_json_schema_dict())
version = python_dict.get('version')
if version not in self.SCHEMA_VERSIONS:
errors.append('Version %s not supported.' % version)
else:
python_dict = self.transform_after_editor_hook(python_dict)
self.validate(python_dict, key, version, errors)
except ValueError as err:
errors.append(str(err))
if errors:
self.validation_error('\n'.join(errors), key=key)
return
if key:
item = self.DAO.DTO(key, python_dict)
else:
item = self.DAO.DTO(None, python_dict)
self.pre_save_hook(item)
key_after_save = self.DAO.save(item)
self.after_save_hook()
transforms.send_json_response(
self, 200, 'Saved.', payload_dict={'key': key_after_save})
def delete(self):
"""Delete the Entity in response to REST request."""
key = self.request.get('key')
if not self.assert_xsrf_token_or_fail(
self.request, self.XSRF_TOKEN, {'key': key}):
return
if not roles.Roles.is_course_admin(self.app_context):
transforms.send_json_response(
self, 401, 'Access denied.', {'key': key})
return
item = self.DAO.load(key)
if not item:
transforms.send_json_response(
self, 404, 'Not found.', {'key': key})
return
if self.is_deletion_allowed(item):
self.DAO.delete(item)
transforms.send_json_response(self, 200, 'Deleted.')
def get(self):
"""Respond to the REST GET verb with the contents of the item."""
key = self.request.get('key')
if not roles.Roles.is_course_admin(self.app_context):
transforms.send_json_response(
self, 401, 'Access denied.', {'key': key})
return
if key:
item = self.DAO.load(key)
version = item.dict.get('version')
if version not in self.SCHEMA_VERSIONS:
transforms.send_json_response(
self, 403, 'Version %s not supported.' % version,
{'key': key})
return
display_dict = copy.copy(item.dict)
display_dict['id'] = item.id
payload_dict = self.transform_for_editor_hook(display_dict)
else:
payload_dict = self.get_default_content()
transforms.send_json_response(
self, 200, 'Success',
payload_dict=payload_dict,
xsrf_token=XsrfTokenManager.create_xsrf_token(self.XSRF_TOKEN))
| danieldanciu/schoggi | modules/dashboard/dto_editor.py | Python | apache-2.0 | 9,313 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This is a script for installing IronPyCompiler.
"""
from __future__ import with_statement
from setuptools import setup
import sys
import ironpycompiler
# Read README.txt and HISTORY.txt
with open("README.txt", "r") as f_readme:
readme_content = f_readme.read()
with open("HISTORY.txt", "r") as f_history:
history_content = f_history.read()
sysver = sys.version_info
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development",
"Topic :: System :: Software Distribution"]
setup_args = {"name": "ironpycompiler",
"version": ironpycompiler.__version__,
"description": ("Compile IronPython scripts "
"into a stand-alone .NET assembly."),
"long_description": readme_content + "\n\n" + history_content,
"author": "Hamukichi (Nombiri)",
"author_email": "hamukichi-dev@outlook.jp",
"packages": ["ironpycompiler"],
"provides": ["ironpycompiler"],
"url": "https://github.com/hamukichi/ironpycompiler",
"classifiers": classifiers,
"license": "MIT License",
"keywords": ["ironpython", ".net", "assembly", "executable",
"compile", "stand-alone", "pyc.py"],
"install_requires": [],
"entry_points": {"console_scripts":
["ipy2asm = ironpycompiler.ipy2asm:main"]}}
if sysver[0] == 2 and sysver[1] < 7:
setup_args["install_requires"].append("argparse")
setup(**setup_args)
| hamukichi/ironpycompiler | setup.py | Python | mit | 2,051 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-09-18 16:38
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('core', '0010_auto_20180918_1317'),
]
operations = [
migrations.AddField(
model_name='municipio',
name='estado',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.Estado', verbose_name='Estado'),
),
]
| interlegis/saap | saap/core/migrations/0011_municipio_estado.py | Python | gpl-3.0 | 577 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Operation(Model):
"""REST API operation definition.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar name: The name of the operation being performed on this particular
object.
:vartype name: str
:ivar display: The localized display information for this particular
operation or action.
:vartype display: ~azure.mgmt.rdbms.mysql.models.OperationDisplay
:ivar origin: The intended executor of the operation. Possible values
include: 'NotSpecified', 'user', 'system'
:vartype origin: str or ~azure.mgmt.rdbms.mysql.models.OperationOrigin
:ivar properties: Additional descriptions for the operation.
:vartype properties: dict[str, object]
"""
_validation = {
'name': {'readonly': True},
'display': {'readonly': True},
'origin': {'readonly': True},
'properties': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'OperationDisplay'},
'origin': {'key': 'origin', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
}
def __init__(self, **kwargs):
super(Operation, self).__init__(**kwargs)
self.name = None
self.display = None
self.origin = None
self.properties = None
| lmazuel/azure-sdk-for-python | azure-mgmt-rdbms/azure/mgmt/rdbms/mysql/models/operation.py | Python | mit | 1,909 |
"""
Many-to-many relationships
To define a many-to-many relationship, use ``ManyToManyField()``.
In this example, an ``Article`` can be published in multiple ``Publication``
objects, and a ``Publication`` has multiple ``Article`` objects.
"""
from django.db import models
class Publication(models.Model):
title = models.CharField(max_length=30)
def __str__(self):
return self.title
class Meta:
ordering = ('title',)
class Tag(models.Model):
id = models.BigAutoField(primary_key=True)
name = models.CharField(max_length=50)
def __str__(self):
return self.name
class Article(models.Model):
headline = models.CharField(max_length=100)
# Assign a string as name to make sure the intermediary model is
# correctly created. Refs #20207
publications = models.ManyToManyField(Publication, name='publications')
tags = models.ManyToManyField(Tag, related_name='tags')
def __str__(self):
return self.headline
class Meta:
ordering = ('headline',)
# Models to test correct related_name inheritance
class AbstractArticle(models.Model):
class Meta:
abstract = True
publications = models.ManyToManyField(Publication, name='publications', related_name='+')
class InheritedArticleA(AbstractArticle):
pass
class InheritedArticleB(AbstractArticle):
pass
class NullableTargetArticle(models.Model):
headline = models.CharField(max_length=100)
publications = models.ManyToManyField(Publication, through='NullablePublicationThrough')
class NullablePublicationThrough(models.Model):
article = models.ForeignKey(NullableTargetArticle, models.CASCADE)
publication = models.ForeignKey(Publication, models.CASCADE, null=True)
| frankvdp/django | tests/many_to_many/models.py | Python | bsd-3-clause | 1,753 |
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
'''
BitBake 'Fetch' implementations
This implementation is for Secure Shell (SSH), and attempts to comply with the
IETF secsh internet draft:
http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
Currently does not support the sftp parameters, as this uses scp
Also does not support the 'fingerprint' connection parameter.
Please note that '/' is used as the host/path separator, not ':' as you may
be used to; also '~' can be used to specify the user's HOME, but again only after '/'.
Example SRC_URI:
SRC_URI = "ssh://user@host.example.com/dir/path/file.txt"
SRC_URI = "ssh://user@host.example.com/~/file.txt"
'''
# Copyright (C) 2006 OpenedHand Ltd.
#
#
# Based in part on svk.py:
# Copyright (C) 2006 Holger Hans Peter Freyther
# Based on svn.py:
# Copyright (C) 2003, 2004 Chris Larson
# Based on functions from the base bb module:
# Copyright 2003 Holger Schurig
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import re, os
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
__pattern__ = re.compile(r'''
\s* # Skip leading whitespace
ssh:// # scheme
( # Optional username/password block
(?P<user>\S+) # username
(:(?P<pass>\S+))? # colon followed by the password (optional)
)?
(?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
@
(?P<host>\S+?) # non-greedy match of the host
(:(?P<port>[0-9]+))? # colon followed by the port (optional)
/
(?P<path>[^;]+) # path on the remote system, may be absolute or relative,
# and may include the use of '~' to reference the remote home
# directory
(?P<sparam>(;[^;]+)*)? # parameters block (optional)
$
''', re.VERBOSE)
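# For illustration, the second example URI from the module docstring parses as:
#     m = __pattern__.match("ssh://user@host.example.com/~/file.txt")
#     m.group('user') == 'user', m.group('host') == 'host.example.com',
#     m.group('port') is None, m.group('path') == '~/file.txt'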
class SSH(FetchMethod):
'''Class to fetch a module or modules via Secure Shell'''
def supports(self, urldata, d):
return __pattern__.match(urldata.url) != None
def supports_checksum(self, urldata):
return False
def urldata_init(self, urldata, d):
if 'protocol' in urldata.parm and urldata.parm['protocol'] == 'git':
raise bb.fetch2.ParameterError(
"Invalid protocol - if you wish to fetch from a git " +
"repository using ssh, you need to use " +
"git:// prefix with protocol=ssh", urldata.url)
m = __pattern__.match(urldata.url)
path = m.group('path')
host = m.group('host')
urldata.localpath = os.path.join(d.getVar('DL_DIR', True),
os.path.basename(os.path.normpath(path)))
def download(self, urldata, d):
dldir = d.getVar('DL_DIR', True)
m = __pattern__.match(urldata.url)
path = m.group('path')
host = m.group('host')
port = m.group('port')
user = m.group('user')
password = m.group('pass')
if port:
portarg = '-P %s' % port
else:
portarg = ''
if user:
fr = user
if password:
fr += ':%s' % password
fr += '@%s' % host
else:
fr = host
fr += ':%s' % path
import commands
cmd = 'scp -B -r %s %s %s/' % (
portarg,
commands.mkarg(fr),
commands.mkarg(dldir)
)
bb.fetch2.check_network_access(d, cmd, urldata.url)
runfetchcmd(cmd, d)
| twoerner/bitbake | lib/bb/fetch2/ssh.py | Python | gpl-2.0 | 4,245 |
import iccpy.gadget
import matplotlib.pyplot as pl
import numpy as np
import iccpy.utils
sim_label = { 'aqa' : 'A', 'aqb' : 'B', 'aqc':'C', 'aqd':'D', 'aqe':'E' }
last_snapnum = { 'aqa2' : 1023, 'aqa3' : 511, 'aqa4' : 1023, 'aqb2' : 127, 'aqc2' : 127, 'aqd2' : 127, 'aqe2' : 127 }
r_200 = { 'aqa1' : 245.67, 'aqa2' : 245.88, 'aqa3' : 245.64, 'aqa4' : 245.70, 'aqa5' : 246.37, 'aqb2' : 187.70, 'aqb4' : 188.85,
'aqc2' : 242.82, 'aqc4' : 243.68, 'aqd2' : 242.85, 'aqd4' : 243.60, 'aqe2' : 212.28, 'aqe4' : 213.63, 'aqf2' : 209.21,
'aqf4' : 207.15 }
M_200 = { 'aqa1' : 183.9, 'aqa2' : 184.2, 'aqa3' : 183.6, 'aqa4' : 183.8, 'aqa5' : 185.3, 'aqb2' : 81.94, 'aqb4' : 83.45,
'aqc2' : 177.4, 'aqc4' : 179.3, 'aqd2' : 177.4, 'aqd4' : 179.1, 'aqe2' : 118.5, 'aqe4' : 120.8, 'aqf2' : 113.5,
'aqf4' : 110.1 }
merger_tree_filename = { 'aqa2' : '/gpfs/data/jch/Aquarius/Trees/Aq-A/2/trees/treedir_127/tree_127.0.hdf5',
'aqb2' : '/gpfs/data/d50wse/WMAP7_Trees/trees_Aq-B2/treedir_127/tree_127.0.hdf5',
'aqc2' : '/gpfs/data/d50wse/WMAP7_Trees/trees_Aq-C2/treedir_127/tree_127.0.hdf5',
'aqd2' : '/gpfs/data/d50wse/WMAP7_Trees/trees_Aq-D2/treedir_127/tree_127.0.hdf5',
'aqe2' : '/gpfs/data/d50wse/WMAP7_Trees/trees_Aq-E2/treedir_127/tree_127.0.hdf5' }
def get_dir(sim_name):
return "/gpfs/data/aquarius/halo_data/Aq-%s/%c/" % (sim_label[sim_name[0:3]], sim_name[3])
def load_last_snapshot(sim_name):
return iccpy.gadget.load_snapshot(directory=get_dir(sim_name), snapnum=last_snapnum[sim_name])
def get_subhaloes(sim_name, snapnum=None):
if snapnum==None:
snapnum=last_snapnum[sim_name]
catalogue = iccpy.gadget.SubfindCatalogue(get_dir(sim_name), snapnum)
return catalogue.subhalo
def get_halo_centre(sim_name):
return get_subhaloes(sim_name)[0].pot_min
def get_merger_tree(sim_name):
return MergerTree(merger_tree_filename[sim_name])
def plot(plot_func, haloes=['A', 'B', 'C', 'D', 'E', 'F'], legend=None, tick_length=8, minor_tick_x_space=None, minor_tick_y_space=None):
from matplotlib.ticker import MultipleLocator
haloes = np.array(haloes)
# no space between the panels
pl.rcParams.update({'figure.subplot.wspace':0,'figure.subplot.hspace':0})
all_haloes = np.array(['A', 'B', 'C', 'D', 'E', 'F'])
plotIdxs = np.sort(iccpy.utils.match(haloes, all_haloes))
numRows = 3
numCols = 2
for i in plotIdxs:
ax = pl.subplot(numRows,numCols,i+1)
plot_func(all_haloes[i], ax)
#Tidy up plot
if minor_tick_y_space is not None:
ax.yaxis.set_minor_locator(MultipleLocator(minor_tick_y_space))
if minor_tick_x_space is not None:
ax.xaxis.set_minor_locator(MultipleLocator(minor_tick_x_space))
left_tick = i%numCols==0 or i-1 not in plotIdxs
ax.yaxis.get_label().set_visible(left_tick)
for tick in ax.yaxis.get_major_ticks():
tick.label1On=left_tick
tick.tick1line.set_markersize(tick_length)
tick.tick2line.set_markersize(tick_length)
if left_tick and i-numCols in plotIdxs:
lims = ax.get_ylim()
ax.set_ylim(lims[0], 0.9999999999*lims[1])
lower_tick = i>=(numRows-1)*numCols or i+numCols not in plotIdxs
ax.xaxis.get_label().set_visible(lower_tick)
for tick in ax.xaxis.get_major_ticks():
tick.label1On=lower_tick
tick.tick1line.set_markersize(tick_length)
tick.tick2line.set_markersize(tick_length)
for tick in ax.yaxis.get_minor_ticks() + ax.xaxis.get_minor_ticks():
tick.tick1line.set_markersize(tick_length/2)
tick.tick2line.set_markersize(tick_length/2)
if lower_tick and i+1 in plotIdxs and (i+1)%numCols!=0:
lims = ax.get_xlim()
ax.set_xlim(lims[0], 0.9999999999*lims[1])
if lower_tick and not left_tick and i<(numRows-1)*numCols:
lims = ax.get_xlim()
ax.set_xlim(lims[0]*1.0000000001, lims[1])
if ax.get_legend() is not None:
if legend is None:
ax.get_legend().set_visible(False)
            elif legend == 'All' or legend == 'all' or all_haloes[i] in legend:
ax.get_legend().draw_frame(False)
else:
ax.get_legend().set_visible(False)
def plot_test(halo, ax):
ax.plot([1,2], [3,4])
pl.legend(['FISH'])
pl.ylabel('x')
if __name__=="__main__":
#print load_last_snapshot("aqa4")
#print get_halo_centre("aqa4")
plot(plot_test, haloes=['A', 'B', 'C', 'D', 'E'], legend='A', minor_tick_x_space=0.025)
#plot(plot_test, minor_tick_x_space=0.025)
pl.show()
| Lowingbn/iccpy | simulations/aquarius.py | Python | mit | 4,810 |
from __future__ import absolute_import
__author__ = 'chris'
from django.db import models
from ..forms import fields as wooey_form_fields
class WooeyOutputFileField(models.FileField):
def formfield(self, **kwargs):
# TODO: Make this from an app that is plugged in
defaults = {'form_class': wooey_form_fields.WooeyOutputFileField}
defaults.update(kwargs)
return super(WooeyOutputFileField, self).formfield(**defaults)
class WooeyUploadFileField(models.FileField):
def formfield(self, **kwargs):
# TODO: Make this from an app that is plugged in
defaults = {'form_class': wooey_form_fields.WooeyUploadFileField}
defaults.update(kwargs)
return super(WooeyUploadFileField, self).formfield(**defaults)
| wooey/Wooey | wooey/models/fields.py | Python | bsd-3-clause | 771 |
r"""
Network
=======
This submodule contains models for calculating topological properties of
networks
"""
from ._topology import *
from ._health import *
| PMEAL/OpenPNM | openpnm/models/network/__init__.py | Python | mit | 158 |
from django.db import models
class Memo(models.Model):
memo_text = models.CharField(max_length=200)
pub_date = models.DateTimeField('date published')
def __str__(self):
return self.memo_text
| a-kirin/Dockerfiles | sample01/web/sample01/memos/models.py | Python | mit | 214 |
from django import forms
from app.models import Player
class CardForm(forms.Form):
    name = forms.CharField(max_length=32, label="名字", help_text="點數卡的名字", required=False)
value = forms.IntegerField(label="值", help_text="點數卡的數值", initial="64", max_value=2560, min_value=-2560)
long_desc = forms.CharField(max_length=200, widget=forms.Textarea(), label="說明", required=False)
active = forms.BooleanField(label="開通", help_text="該點數卡是否可用", required=False, initial=True)
class FeedForm(forms.Form):
player = forms.ModelChoiceField(Player.objects.all(), label="玩家")
| sitcon-tw/arcane | app/card/forms.py | Python | agpl-3.0 | 637 |
"""
mbed SDK
Copyright (c) 2018 ARM Limited
SPDX-License-Identifier: Apache-2.0
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import functools
import itertools
import time
import threading
import uuid
import sys
import serial
import serial.tools.list_ports as stlp
import six
import mbed_host_tests
MSG_KEY_DEVICE_READY = 'ready'
MSG_KEY_SERIAL_NUMBER = 'usb_dev_sn'
MSG_KEY_PORT_OPEN_WAIT = 'port_open_wait'
MSG_KEY_PORT_OPEN_CLOSE = 'port_open_close'
MSG_KEY_SEND_BYTES_SINGLE = 'send_single'
MSG_KEY_SEND_BYTES_MULTIPLE = 'send_multiple'
MSG_KEY_LOOPBACK = 'loopback'
MSG_KEY_CHANGE_LINE_CODING = 'change_lc'
RX_BUFF_SIZE = 32
# This delay eliminates the possibility of the device detecting
# the port being closed when still waiting for data.
TERM_CLOSE_DELAY = 0.01
# A duration the serial terminal is open on the host side
# during terminal reopen test.
TERM_REOPEN_DELAY = 0.1
# 6 (baud) + 2 (bits) + 1 (parity) + 1 (stop) + 3 * comma
LINE_CODING_STRLEN = 13
def usb_serial_name(serial_number):
"""Get USB serial device name based on the device serial number."""
if sys.platform.startswith('win'):
# The USB spec defines all USB string descriptors to be
# UNICODE UTF-16LE. Windows however, decodes the USB serial
# number string descriptor as uppercase characters only.
# To solve this issue, convert the pattern to uppercase.
serial_number = str(serial_number).upper()
for port_info in stlp.comports():
if port_info.serial_number == serial_number:
return port_info.device
return None
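# Purely illustrative (the serial number and port names are made up):
# usb_serial_name("0123456789abcdef") might return "/dev/ttyACM0" on Linux or
# "COM3" on Windows, and returns None when no matching device is attached.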
class RetryError(Exception):
"""Exception raised by retry_fun_call()."""
def retry_fun_call(fun, num_retries=3, retry_delay=0.0):
"""Call fun and retry if any exception was raised.
fun is called at most num_retries with a retry_dalay in between calls.
Raises RetryError if the retry limit is exhausted.
"""
verbose = False
final_err = None
for retry in range(1, num_retries + 1):
try:
return fun() # pylint: disable=not-callable
except Exception as exc: # pylint: disable=broad-except
final_err = exc
if verbose:
print('Retry {}/{} failed ({})'
.format(retry, num_retries, str(fun)))
time.sleep(retry_delay)
err_msg = 'Failed with "{}". Tried {} times.'
raise RetryError(err_msg.format(final_err, num_retries))
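# Usage sketch mirroring the calls made in the test class below (the serial
# number here is made up):
#     retry_fun_call(
#         fun=functools.partial(usb_serial_name, "0123456789abcdef"),
#         num_retries=20, retry_delay=0.05)
# retries the lookup up to 20 times before raising RetryError.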
class USBSerialTest(mbed_host_tests.BaseHostTest):
"""Host side test for USB CDC & Serial classes."""
_BYTESIZES = {
5: serial.FIVEBITS,
6: serial.SIXBITS,
7: serial.SEVENBITS,
8: serial.EIGHTBITS}
_PARITIES = {
0: serial.PARITY_NONE,
1: serial.PARITY_ODD,
2: serial.PARITY_EVEN,
3: serial.PARITY_MARK,
4: serial.PARITY_SPACE}
_STOPBITS = {
0: serial.STOPBITS_ONE,
1: serial.STOPBITS_ONE_POINT_FIVE,
2: serial.STOPBITS_TWO}
@staticmethod
def get_usb_serial_name(usb_id_str):
"""Get USB serial device name as registered in the system.
Search is based on the unique USB SN generated by the host
during test suite setup.
Raises RuntimeError if the device is not found.
"""
port_name = usb_serial_name(usb_id_str)
if port_name is None:
err_msg = 'USB serial device (SN={}) not found.'
raise RuntimeError(err_msg.format(usb_id_str))
return port_name
def __init__(self):
super(USBSerialTest, self).__init__()
self.__bg_task = None
self.dut_usb_dev_sn = uuid.uuid4().hex # 32 hex digit string
def port_open_wait(self):
"""Open the serial and wait until it's closed by the device."""
mbed_serial = serial.Serial(dsrdtr=False)
mbed_serial.dtr = False
try:
mbed_serial.port = retry_fun_call(
fun=functools.partial(self.get_usb_serial_name, self.dut_usb_dev_sn), # pylint: disable=not-callable
num_retries=20,
retry_delay=0.05)
retry_fun_call(
fun=mbed_serial.open,
num_retries=20,
retry_delay=0.05)
except RetryError as exc:
self.log('TEST ERROR: {}'.format(exc))
self.notify_complete(False)
return
mbed_serial.dtr = True
try:
mbed_serial.read() # wait until closed
except (serial.portNotOpenError, serial.SerialException):
pass
def port_open_close(self):
"""Open the serial and close it with a delay."""
mbed_serial = serial.Serial(timeout=0.5, write_timeout=0.1, dsrdtr=False)
mbed_serial.dtr = False
try:
mbed_serial.port = retry_fun_call(
fun=functools.partial(self.get_usb_serial_name, self.dut_usb_dev_sn), # pylint: disable=not-callable
num_retries=20,
retry_delay=0.05)
retry_fun_call(
fun=mbed_serial.open,
num_retries=20,
retry_delay=0.05)
except RetryError as exc:
self.log('TEST ERROR: {}'.format(exc))
self.notify_complete(False)
return
mbed_serial.reset_output_buffer()
mbed_serial.dtr = True
time.sleep(TERM_REOPEN_DELAY)
mbed_serial.close()
def send_data_sequence(self, chunk_size=1):
"""Open the serial and send a sequence of values.
chunk_size defines the size of data sent in each write operation.
The input buffer content is discarded.
"""
mbed_serial = serial.Serial(write_timeout=0.1, dsrdtr=False)
try:
mbed_serial.port = retry_fun_call(
fun=functools.partial(self.get_usb_serial_name, self.dut_usb_dev_sn), # pylint: disable=not-callable
num_retries=20,
retry_delay=0.05)
retry_fun_call(
fun=mbed_serial.open,
num_retries=20,
retry_delay=0.05)
except RetryError as exc:
self.log('TEST ERROR: {}'.format(exc))
self.notify_complete(False)
return
mbed_serial.reset_output_buffer()
mbed_serial.dtr = True
for byteval in itertools.chain(reversed(range(0x100)), range(0x100)):
try:
payload = bytearray(chunk_size * (byteval,))
mbed_serial.write(payload)
# self.log('SENT: {!r}'.format(payload))
# Discard input buffer content. The data received from the
# device during the concurrent rx/tx test is irrelevant.
mbed_serial.reset_input_buffer()
except serial.SerialException as exc:
self.log('TEST ERROR: {}'.format(exc))
self.notify_complete(False)
return
while mbed_serial.out_waiting > 0:
time.sleep(0.001)
time.sleep(TERM_CLOSE_DELAY)
mbed_serial.close()
def loopback(self):
"""Open the serial and send back every byte received."""
mbed_serial = serial.Serial(timeout=0.5, write_timeout=0.1, dsrdtr=False)
mbed_serial.dtr = False
try:
mbed_serial.port = retry_fun_call(
fun=functools.partial(self.get_usb_serial_name, self.dut_usb_dev_sn), # pylint: disable=not-callable
num_retries=20,
retry_delay=0.05)
retry_fun_call(
fun=mbed_serial.open,
num_retries=20,
retry_delay=0.05)
except RetryError as exc:
self.log('TEST ERROR: {}'.format(exc))
self.notify_complete(False)
return
mbed_serial.reset_output_buffer()
mbed_serial.dtr = True
try:
payload = mbed_serial.read(1)
while len(payload) == 1:
mbed_serial.write(payload)
# self.log('SENT: {!r}'.format(payload))
payload = mbed_serial.read(1)
except serial.SerialException as exc:
self.log('TEST ERROR: {}'.format(exc))
self.notify_complete(False)
return
while mbed_serial.out_waiting > 0:
time.sleep(0.001)
time.sleep(TERM_CLOSE_DELAY)
mbed_serial.close()
def change_line_coding(self):
"""Open the serial and change serial params according to device request.
New line coding params are read from the device serial data.
"""
mbed_serial = serial.Serial(timeout=0.5, dsrdtr=False)
mbed_serial.dtr = False
try:
mbed_serial.port = retry_fun_call(
fun=functools.partial(self.get_usb_serial_name, self.dut_usb_dev_sn), # pylint: disable=not-callable
num_retries=20,
retry_delay=0.05)
retry_fun_call(
fun=mbed_serial.open,
num_retries=20,
retry_delay=0.05)
except RetryError as exc:
self.log('TEST ERROR: {}'.format(exc))
self.notify_complete(False)
return
mbed_serial.reset_output_buffer()
mbed_serial.dtr = True
try:
payload = six.ensure_str(mbed_serial.read(LINE_CODING_STRLEN))
while len(payload) == LINE_CODING_STRLEN:
baud, bits, parity, stop = (int(i) for i in payload.split(','))
new_line_coding = {
'baudrate': baud,
'bytesize': self._BYTESIZES[bits],
'parity': self._PARITIES[parity],
'stopbits': self._STOPBITS[stop]}
mbed_serial.apply_settings(new_line_coding)
payload = six.ensure_str(mbed_serial.read(LINE_CODING_STRLEN))
except serial.SerialException as exc:
self.log('TEST ERROR: {}'.format(exc))
self.notify_complete(False)
return
time.sleep(TERM_CLOSE_DELAY)
mbed_serial.close()
def setup(self):
self.register_callback(MSG_KEY_DEVICE_READY, self.cb_device_ready)
self.register_callback(MSG_KEY_PORT_OPEN_WAIT, self.cb_port_open_wait)
self.register_callback(MSG_KEY_PORT_OPEN_CLOSE, self.cb_port_open_close)
self.register_callback(MSG_KEY_SEND_BYTES_SINGLE, self.cb_send_bytes_single)
self.register_callback(MSG_KEY_SEND_BYTES_MULTIPLE, self.cb_send_bytes_multiple)
self.register_callback(MSG_KEY_LOOPBACK, self.cb_loopback)
self.register_callback(MSG_KEY_CHANGE_LINE_CODING, self.cb_change_line_coding)
def cb_device_ready(self, key, value, timestamp):
"""Send a unique USB SN to the device.
DUT uses this SN every time it connects to host as a USB device.
"""
self.send_kv(MSG_KEY_SERIAL_NUMBER, self.dut_usb_dev_sn)
def start_bg_task(self, **thread_kwargs):
"""Start a new daemon thread.
The callbacks delegate serial handling to a background task to
prevent any delays in the device side assert handling. Only one
background task is kept running to prevent multiple access
to serial.
"""
try:
self.__bg_task.join()
except (AttributeError, RuntimeError):
pass
self.__bg_task = threading.Thread(**thread_kwargs)
self.__bg_task.daemon = True
self.__bg_task.start()
def cb_port_open_wait(self, key, value, timestamp):
"""Open the serial and wait until it's closed by the device."""
self.start_bg_task(target=self.port_open_wait)
def cb_port_open_close(self, key, value, timestamp):
"""Open the serial and close it with a delay."""
self.start_bg_task(target=self.port_open_close)
def cb_send_bytes_single(self, key, value, timestamp):
"""Open the serial and send a sequence of values."""
self.start_bg_task(
target=self.send_data_sequence,
args=(1, ))
def cb_send_bytes_multiple(self, key, value, timestamp):
"""Open the serial and send a sequence of one byte values."""
chunk_size = RX_BUFF_SIZE * int(value)
self.start_bg_task(
target=self.send_data_sequence,
args=(chunk_size, ))
def cb_loopback(self, key, value, timestamp):
"""Open the serial and send a sequence of multibyte values."""
self.start_bg_task(target=self.loopback)
def cb_change_line_coding(self, key, value, timestamp):
"""Open the serial and change the line coding."""
self.start_bg_task(target=self.change_line_coding)
| kjbracey-arm/mbed | TESTS/host_tests/usb_device_serial.py | Python | apache-2.0 | 13,241 |
from networkx.algorithms.assortativity import *
from networkx.algorithms.block import *
from networkx.algorithms.boundary import *
from networkx.algorithms.centrality import *
from networkx.algorithms.cluster import *
from networkx.algorithms.clique import *
from networkx.algorithms.community import *
from networkx.algorithms.components import *
from networkx.algorithms.connectivity import *
from networkx.algorithms.core import *
from networkx.algorithms.cycles import *
from networkx.algorithms.dag import *
from networkx.algorithms.distance_measures import *
from networkx.algorithms.flow import *
from networkx.algorithms.hierarchy import *
from networkx.algorithms.matching import *
from networkx.algorithms.mis import *
from networkx.algorithms.mst import *
from networkx.algorithms.link_analysis import *
from networkx.algorithms.operators import *
from networkx.algorithms.shortest_paths import *
from networkx.algorithms.smetric import *
from networkx.algorithms.traversal import *
from networkx.algorithms.isolate import *
from networkx.algorithms.euler import *
from networkx.algorithms.vitality import *
from networkx.algorithms.chordal import *
from networkx.algorithms.richclub import *
from networkx.algorithms.distance_regular import *
from networkx.algorithms.swap import *
from networkx.algorithms.graphical import *
from networkx.algorithms.simple_paths import *
import networkx.algorithms.assortativity
import networkx.algorithms.bipartite
import networkx.algorithms.centrality
import networkx.algorithms.cluster
import networkx.algorithms.clique
import networkx.algorithms.components
import networkx.algorithms.connectivity
import networkx.algorithms.flow
import networkx.algorithms.isomorphism
import networkx.algorithms.link_analysis
import networkx.algorithms.shortest_paths
import networkx.algorithms.traversal
import networkx.algorithms.chordal
import networkx.algorithms.operators
from networkx.algorithms.bipartite import projected_graph,project,is_bipartite
from networkx.algorithms.isomorphism import is_isomorphic,could_be_isomorphic,\
fast_could_be_isomorphic,faster_could_be_isomorphic
| KNMI/VERCE | verce-hpc-pe/src/networkx/algorithms/__init__.py | Python | mit | 2,128 |
#--------------------------------------------------------------
# Purpose: Creates custom quarter quarter section grid labels.
#
# Author: Ian Broad
# Website: www.ianbroad.com
#
# Created: 04/21/2014
#--------------------------------------------------------------
import arcpy
arcpy.env.overwriteOutput = True
polygon = arcpy.GetParameterAsText(0)
labels = arcpy.GetParameterAsText(1)
qq_grid = arcpy.GetParameterAsText(2)
output = arcpy.GetParameterAsText(3)
#Assigning labels
one,two,three,four,five,six,seven,eight,nine,ten,eleven,twelve,thirteen,fourteen,fifteen,sixteen = labels.split(", ")
mem_point = arcpy.CreateFeatureclass_management("in_memory", "mem_point", "POINT", "", "DISABLED", "DISABLED", polygon)
arcpy.AddField_management(mem_point, "GridLabel", "TEXT")
result = arcpy.GetCount_management(polygon)
count = int(result.getOutput(0))
arcpy.SetProgressor("step", "Creating QQ Section Labels...", 0, count, 1)
insert_cursor = arcpy.da.InsertCursor(mem_point, ["SHAPE@XY", "GridLabel"])
search_cursor = arcpy.da.SearchCursor(polygon, ["SHAPE@"])
for row in search_cursor:
try:
coordinateList = []
lowerLeft_distances = {}
lowerRight_distances = {}
upperLeft_distances = {}
upperRight_distances = {}
for part in row[0]:
for pnt in part:
if pnt:
coordinateList.append((pnt.X, pnt.Y))
#Finds the extent of each polygon
polygonExtent = row[0].extent
lowerLeft_coordinate = polygonExtent.lowerLeft
lowerRight_coordinate = polygonExtent.lowerRight
upperLeft_coordinate = polygonExtent.upperLeft
upperRight_coordinate = polygonExtent.upperRight
lowerLeft_point = arcpy.PointGeometry(lowerLeft_coordinate)
lowerRight_point = arcpy.PointGeometry(lowerRight_coordinate)
upperLeft_point = arcpy.PointGeometry(upperLeft_coordinate)
upperRight_point = arcpy.PointGeometry(upperRight_coordinate)
#Finds the vertex closest to each corner of the polygon extent
for vertex in coordinateList:
vertex_coordinates = arcpy.Point(vertex[0], vertex[1])
vertex_point = arcpy.PointGeometry(vertex_coordinates)
lowerLeft_distances[float(lowerLeft_point.distanceTo(vertex_point))] = (vertex[0], vertex[1])
for vertex in coordinateList:
vertex_coordinates = arcpy.Point(vertex[0], vertex[1])
vertex_point = arcpy.PointGeometry(vertex_coordinates)
lowerRight_distances[float(lowerRight_point.distanceTo(vertex_point))] = (vertex[0], vertex[1])
for vertex in coordinateList:
vertex_coordinates = arcpy.Point(vertex[0], vertex[1])
vertex_point = arcpy.PointGeometry(vertex_coordinates)
upperLeft_distances[float(upperLeft_point.distanceTo(vertex_point))] = (vertex[0], vertex[1])
for vertex in coordinateList:
vertex_coordinates = arcpy.Point(vertex[0], vertex[1])
vertex_point = arcpy.PointGeometry(vertex_coordinates)
upperRight_distances[float(upperRight_point.distanceTo(vertex_point))] = (vertex[0], vertex[1])
#Calculates approximate centroid of each quarter quarter section, it's ugly but good enough for now
LLminDistance = min(lowerLeft_distances)
LRminDistance = min(lowerRight_distances)
ULminDistance = min(upperLeft_distances)
URminDistance = min(upperRight_distances)
top_left_X = upperLeft_distances[ULminDistance][0]
top_left_Y = upperLeft_distances[ULminDistance][1]
top_right_X = upperRight_distances[URminDistance][0]
top_right_Y = upperRight_distances[URminDistance][1]
bottom_left_X = lowerLeft_distances[LLminDistance][0]
bottom_left_Y = lowerLeft_distances[LLminDistance][1]
bottom_right_X = lowerRight_distances[LRminDistance][0]
bottom_right_Y = lowerRight_distances[LRminDistance][1]
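        # The nested midpoint averages below place each quarter-quarter centroid
        # at the 1/8, 3/8, 5/8 and 7/8 fractions between the extreme corners,
        # e.g. with bottom_left_X = 0 and top_right_X = 1, one_X = 0.125 (1/8)
        # and four_X = 0.875 (7/8).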
one_X = ((bottom_left_X + ((((top_right_X + bottom_left_X)/2.0) + bottom_left_X)/2.0))/2.0)
one_Y = ((((((top_right_Y + bottom_left_Y)/2.0) + top_right_Y)/2.0) + top_right_Y)/2.0)
two_X = ((((((top_right_X + bottom_left_X)/2.0) + bottom_left_X)/2.0) + ((top_right_X + bottom_left_X)/2.0))/2.0)
two_Y = ((((((top_right_Y + bottom_left_Y)/2.0) + top_right_Y)/2.0) + top_right_Y)/2.0)
three_X = ((((((top_right_X + bottom_left_X)/2.0) + top_right_X)/2.0) + ((top_right_X + bottom_left_X)/2.0))/2.0)
three_Y = ((((((top_right_Y + bottom_left_Y)/2.0) + top_right_Y)/2.0) + top_right_Y)/2.0)
four_X = ((((((top_right_X + bottom_left_X)/2.0) + top_right_X)/2.0) + top_right_X)/2.0)
four_Y = ((((((top_right_Y + bottom_left_Y)/2.0) + top_right_Y)/2.0) + top_right_Y)/2.0)
five_X = ((bottom_left_X + ((((top_right_X + bottom_left_X)/2.0) + bottom_left_X)/2.0))/2.0)
five_Y = ((((((top_right_Y + bottom_left_Y)/2.0) + top_right_Y)/2.0) + ((top_right_Y + bottom_left_Y)/2.0))/2.0)
six_X = ((((((top_right_X + bottom_left_X)/2.0) + bottom_left_X)/2.0) + ((top_right_X + bottom_left_X)/2.0))/2.0)
six_Y = ((((((top_right_Y + bottom_left_Y)/2.0) + top_right_Y)/2.0) + ((top_right_Y + bottom_left_Y)/2.0))/2.0)
seven_X = ((((((top_right_X + bottom_left_X)/2.0) + top_right_X)/2.0) + ((top_right_X + bottom_left_X)/2.0))/2.0)
seven_Y = ((((((top_right_Y + bottom_left_Y)/2.0) + top_right_Y)/2.0) + ((top_right_Y + bottom_left_Y)/2.0))/2.0)
eight_X = ((((((top_right_X + bottom_left_X)/2.0) + top_right_X)/2.0) + top_right_X)/2.0)
eight_Y = ((((((top_right_Y + bottom_left_Y)/2.0) + top_right_Y)/2.0) + ((top_right_Y + bottom_left_Y)/2.0))/2.0)
nine_X = ((bottom_left_X + ((((top_right_X + bottom_left_X)/2.0) + bottom_left_X)/2.0))/2.0)
nine_Y = ((((top_right_Y + bottom_left_Y)/2.0) + ((((top_right_Y + bottom_left_Y)/2.0) + bottom_left_Y)/2.0))/2.0)
ten_X = ((((((top_right_X + bottom_left_X)/2.0) + bottom_left_X)/2.0) + ((top_right_X + bottom_left_X)/2.0))/2.0)
ten_Y = ((((top_right_Y + bottom_left_Y)/2.0) + ((((top_right_Y + bottom_left_Y)/2.0) + bottom_left_Y)/2.0))/2.0)
eleven_X = ((((((top_right_X + bottom_left_X)/2.0) + top_right_X)/2.0) + ((top_right_X + bottom_left_X)/2.0))/2.0)
eleven_Y = ((((top_right_Y + bottom_left_Y)/2.0) + ((((top_right_Y + bottom_left_Y)/2.0) + bottom_left_Y)/2.0))/2.0)
twelve_X = ((((((top_right_X + bottom_left_X)/2.0) + top_right_X)/2.0) + top_right_X)/2.0)
twelve_Y = ((((top_right_Y + bottom_left_Y)/2.0) + ((((top_right_Y + bottom_left_Y)/2.0) + bottom_left_Y)/2.0))/2.0)
thirteen_X = ((bottom_left_X + ((((top_right_X + bottom_left_X)/2.0) + bottom_left_X)/2.0))/2.0)
thirteen_Y = ((((((top_right_Y + bottom_left_Y)/2.0) + bottom_left_Y)/2.0) + bottom_left_Y)/2.0)
fourteen_X = ((((((top_right_X + bottom_left_X)/2.0) + bottom_left_X)/2.0) + ((top_right_X + bottom_left_X)/2.0))/2.0)
fourteen_Y = ((((((top_right_Y + bottom_left_Y)/2.0) + bottom_left_Y)/2.0) + bottom_left_Y)/2.0)
fifteen_X = ((((((top_right_X + bottom_left_X)/2.0) + top_right_X)/2.0) + ((top_right_X + bottom_left_X)/2.0))/2.0)
fifteen_Y = ((((((top_right_Y + bottom_left_Y)/2.0) + bottom_left_Y)/2.0) + bottom_left_Y)/2.0)
sixteen_X = ((((((top_right_X + bottom_left_X)/2.0) + top_right_X)/2.0) + top_right_X)/2.0)
sixteen_Y = ((((((top_right_Y + bottom_left_Y)/2.0) + bottom_left_Y)/2.0) + bottom_left_Y)/2.0)
insert_cursor.insertRow(((one_X, one_Y), one))
insert_cursor.insertRow(((two_X, two_Y), two))
insert_cursor.insertRow(((three_X, three_Y), three))
insert_cursor.insertRow(((four_X, four_Y), four))
insert_cursor.insertRow(((five_X, five_Y), five))
insert_cursor.insertRow(((six_X, six_Y), six))
insert_cursor.insertRow(((seven_X, seven_Y), seven))
insert_cursor.insertRow(((eight_X, eight_Y), eight))
insert_cursor.insertRow(((nine_X, nine_Y), nine))
insert_cursor.insertRow(((ten_X, ten_Y), ten))
insert_cursor.insertRow(((eleven_X, eleven_Y), eleven))
insert_cursor.insertRow(((twelve_X, twelve_Y), twelve))
insert_cursor.insertRow(((thirteen_X, thirteen_Y), thirteen))
insert_cursor.insertRow(((fourteen_X, fourteen_Y), fourteen))
insert_cursor.insertRow(((fifteen_X, fifteen_Y), fifteen))
insert_cursor.insertRow(((sixteen_X, sixteen_Y), sixteen))
arcpy.SetProgressorPosition()
except Exception as e:
print e.message
del insert_cursor
del search_cursor
arcpy.SpatialJoin_analysis(qq_grid, mem_point, output, "JOIN_ONE_TO_ONE", "KEEP_ALL", "", "CLOSEST")
arcpy.Delete_management(mem_point)
arcpy.ResetProgressor()
arcpy.GetMessages() | inkenbrandt/ArcPy | PLSS/CreateQQLabels.py | Python | gpl-2.0 | 8,932 |
from models import (AuctionUser, Auction, AuctionItem, AuctionEvent,
AuctionParticipant, AuctionBid)
from django.contrib import admin
admin.site.register(AuctionUser)
admin.site.register(Auction)
admin.site.register(AuctionItem)
admin.site.register(AuctionEvent)
admin.site.register(AuctionParticipant)
admin.site.register(AuctionBid)
| ajportier/djauction | admin.py | Python | gpl-3.0 | 340 |
# coding: utf-8
#
# Copyright 2020 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for scripts/linters/css_linter.py."""
from __future__ import annotations
import os
import subprocess
from core.tests import test_utils
from scripts import scripts_test_utils
from . import css_linter
PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
CONFIG_PATH = os.path.join(
PARENT_DIR, 'oppia', 'core', 'templates', 'css', '.stylelintrc')
LINTER_TESTS_DIR = os.path.join(os.getcwd(), 'scripts', 'linters', 'test_files')
VALID_CSS_FILEPATH = os.path.join(LINTER_TESTS_DIR, 'valid.css')
INVALID_CSS_FILEPATH = os.path.join(LINTER_TESTS_DIR, 'invalid.css')
class ThirdPartyCSSLintChecksManagerTests(test_utils.LinterTestBase):
"""Tests for ThirdPartyCSSLintChecksManager class."""
def test_all_filepaths_with_success(self):
filepaths = [VALID_CSS_FILEPATH, INVALID_CSS_FILEPATH]
third_party_linter = css_linter.ThirdPartyCSSLintChecksManager(
CONFIG_PATH, filepaths)
returned_filepaths = third_party_linter.all_filepaths
self.assertEqual(returned_filepaths, filepaths)
def test_perform_all_lint_checks_with_invalid_file(self):
third_party_linter = css_linter.ThirdPartyCSSLintChecksManager(
CONFIG_PATH, [INVALID_CSS_FILEPATH])
lint_task_report = third_party_linter.lint_css_files()
self.assert_same_list_elements([
'19:16',
'Unexpected whitespace before \":\" declaration-colon-space-'
'before'], lint_task_report.get_report())
self.assertEqual('Stylelint', lint_task_report.name)
self.assertTrue(lint_task_report.failed)
def test_perform_all_lint_checks_with_invalid_stylelint_path(self):
def mock_join(*unused_args):
return 'node_modules/stylelint/bin/stylelinter.js'
join_swap = self.swap(os.path, 'join', mock_join)
third_party_linter = css_linter.ThirdPartyCSSLintChecksManager(
CONFIG_PATH, [INVALID_CSS_FILEPATH])
with self.print_swap, join_swap, self.assertRaisesRegexp(
Exception,
'ERROR Please run start.sh first to install node-eslint or '
'node-stylelint and its dependencies.'):
third_party_linter.perform_all_lint_checks()
def test_perform_all_lint_checks_with_stderr(self):
def mock_popen(unused_commands, stdout, stderr): # pylint: disable=unused-argument
return scripts_test_utils.PopenStub(stdout=b'True', stderr=b'True')
popen_swap = self.swap_with_checks(subprocess, 'Popen', mock_popen)
third_party_linter = css_linter.ThirdPartyCSSLintChecksManager(
CONFIG_PATH, [VALID_CSS_FILEPATH])
with self.print_swap, popen_swap, self.assertRaisesRegexp(
Exception, 'True'
):
third_party_linter.perform_all_lint_checks()
def test_perform_all_lint_checks_with_no_files(self):
third_party_linter = css_linter.ThirdPartyCSSLintChecksManager(
CONFIG_PATH, [])
lint_task_report = third_party_linter.perform_all_lint_checks()
self.assertEqual(
'There are no HTML or CSS files to lint.',
lint_task_report[0].get_report()[0])
self.assertEqual('CSS lint', lint_task_report[0].name)
self.assertFalse(lint_task_report[0].failed)
def test_perform_all_lint_checks_with_valid_file(self):
third_party_linter = css_linter.ThirdPartyCSSLintChecksManager(
CONFIG_PATH, [VALID_CSS_FILEPATH])
lint_task_report = third_party_linter.perform_all_lint_checks()
self.assertTrue(isinstance(lint_task_report, list))
def test_get_linters(self):
custom_linter, third_party_linter = css_linter.get_linters(
CONFIG_PATH, [VALID_CSS_FILEPATH, INVALID_CSS_FILEPATH])
self.assertEqual(custom_linter, None)
self.assertTrue(
isinstance(
third_party_linter, css_linter.ThirdPartyCSSLintChecksManager))
| kevinlee12/oppia | scripts/linters/css_linter_test.py | Python | apache-2.0 | 4,592 |
import os, sys, time
import vagrant
from dockyard.core.command_interpreter import *
class StackCommandInterpreter(CommandInterpreter):
""" Command interpreter for Stack related tasks """
def up(self, args):
# Bring Vagrant UP
v = vagrant.Vagrant()
if not v.status()['default'] == 'running':
self.vagrant.up()
# Verify Vagrant is UP
i = 0
while not v.status()['default'] == 'running':
print "waiting for Vagrant box.."
time.sleep(1)
i = i + 1
if i > 5:
print "Something went wrong, Vagrant box is still not up."
sys.exit(1)
# Get a list of the docker containers we have built already
dockerDirs = filter(lambda x: os.path.isdir('docker/' + x), os.listdir('docker'))
imagesBuilt = []
for imageInfo in self.docker.dockerClient.images():
imagesBuilt.append(imageInfo['Repository'])
# Build docker containers
for dockerName in list(set(dockerDirs) - set(imagesBuilt)):
self.docker.build(dockerName)
def down(self, args):
self.vagrant.destroy()
def start(self, args):
for dockName in args.configuration:
self.dockyard.start(dockName)
# dockConfig = json.load(open('dock.json'))
# for dockName in args.configuration:
# if not dockConfig['docks'].get(dockName):
# print 'No such dock configuration [%s] found.' % dockName
# sys.exit(1)
# run('vagrant ssh --command "python /vagrant/scripts/dns.py %s"' % dockName)
# # f = open('','w')
# # f.write('')
# # f.close()
# #dock
def stop(self, args):
for id in args.configuration:
self.dockyard.stop(id)
| smysnk/dockyard | dockyard/core/plugin_stack/command_interpreter.py | Python | mit | 1,870 |
# Thanks go to
# * cbelden for this module (https://github.com/
from controls import kill_stream
from consumers import create_consumer
from producers import create_producer | haukurk/Partify | partify/core/__init__.py | Python | mit | 173 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2016 Multimex (<http://multimex.com.vn>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import pricelist
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| ncliam/serverpos | openerp/custom_modules/pricelist_uom/__init__.py | Python | agpl-3.0 | 1,075 |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
import os
import shutil
import signal
import sys
import tempfile
import time
import uuid
import zipfile
from contextlib import closing, contextmanager
from colors import green
from six import string_types
from pants.util.dirutil import safe_delete
from pants.util.tarutil import TarFile
class InvalidZipPath(ValueError):
"""Indicates a bad zip file path."""
def get_joined_path(new_entries, env=None, env_var='PATH', delimiter=':', prepend=False):
"""Join path entries, combining with an environment variable if specified."""
if env is None:
env = {}
prev_path = env.get(env_var, None)
if prev_path is None:
path_dirs = list()
else:
path_dirs = list(prev_path.split(delimiter))
new_entries_list = list(new_entries)
if prepend:
path_dirs = new_entries_list + path_dirs
else:
path_dirs += new_entries_list
return delimiter.join(path_dirs)
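# Illustrative usage (not part of the original module): get_joined_path appends the new
# entries after any existing value, or before it when prepend=True, e.g.
#   get_joined_path(['/opt/bin'], env={'PATH': '/usr/bin:/bin'})            -> '/usr/bin:/bin:/opt/bin'
#   get_joined_path(['/opt/bin'], env={'PATH': '/usr/bin'}, prepend=True)   -> '/opt/bin:/usr/bin'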
@contextmanager
def environment_as(**kwargs):
"""Update the environment to the supplied values, for example:
with environment_as(PYTHONPATH='foo:bar:baz',
PYTHON='/usr/bin/python2.7'):
subprocess.Popen(foo).wait()
"""
new_environment = kwargs
old_environment = {}
def setenv(key, val):
if val is not None:
os.environ[key] = val
else:
if key in os.environ:
del os.environ[key]
for key, val in new_environment.items():
old_environment[key] = os.environ.get(key)
setenv(key, val)
try:
yield
finally:
for key, val in old_environment.items():
setenv(key, val)
@contextmanager
def hermetic_environment_as(**kwargs):
"""Set the environment to the supplied values from an empty state."""
old_environment, os.environ = os.environ, {}
try:
with environment_as(**kwargs):
yield
finally:
os.environ = old_environment
@contextmanager
def _stdio_stream_as(src_fd, dst_fd, dst_sys_attribute, mode):
"""Replace the given dst_fd and attribute on `sys` with an open handle to the given src_fd."""
if src_fd == -1:
src = open('/dev/null', mode)
src_fd = src.fileno()
# Capture the python and os level file handles.
old_dst = getattr(sys, dst_sys_attribute)
old_dst_fd = os.dup(dst_fd)
if src_fd != dst_fd:
os.dup2(src_fd, dst_fd)
# Open up a new file handle to temporarily replace the python-level io object, then yield.
new_dst = os.fdopen(dst_fd, mode)
setattr(sys, dst_sys_attribute, new_dst)
try:
yield
finally:
new_dst.close()
# Restore the python and os level file handles.
os.dup2(old_dst_fd, dst_fd)
setattr(sys, dst_sys_attribute, old_dst)
@contextmanager
def stdio_as(stdout_fd, stderr_fd, stdin_fd):
"""Redirect sys.{stdout, stderr, stdin} to alternate file descriptors.
As a special case, if a given destination fd is `-1`, we will replace it with an open file handle
to `/dev/null`.
NB: If the filehandles for sys.{stdout, stderr, stdin} have previously been closed, it's
possible that the OS has repurposed fds `0, 1, 2` to represent other files or sockets. It's
impossible for this method to locate all python objects which refer to those fds, so it's up
to the caller to guarantee that `0, 1, 2` are safe to replace.
"""
with _stdio_stream_as(stdin_fd, 0, 'stdin', 'rb'),\
_stdio_stream_as(stdout_fd, 1, 'stdout', 'wb'),\
_stdio_stream_as(stderr_fd, 2, 'stderr', 'wb'):
yield
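# Illustrative usage (not part of the original module): redirect stdout/stderr to a log
# file and read stdin from /dev/null for the duration of the block.
#   with open('/tmp/build.log', 'wb') as log:        # hypothetical destination file
#     with stdio_as(log.fileno(), log.fileno(), -1):
#       ...  # output written via sys.stdout/sys.stderr in this block lands in the log file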
@contextmanager
def signal_handler_as(sig, handler):
"""Temporarily replaces a signal handler for the given signal and restores the old handler.
:param int sig: The target signal to replace the handler for (e.g. signal.SIGINT).
:param func handler: The new temporary handler.
"""
old_handler = signal.signal(sig, handler)
try:
yield
finally:
signal.signal(sig, old_handler)
@contextmanager
def temporary_dir(root_dir=None, cleanup=True, suffix=str(), permissions=None, prefix=tempfile.template):
"""
A with-context that creates a temporary directory.
:API: public
  You may specify the following keyword args:
  :param string root_dir: The parent directory to create the temporary directory.
  :param bool cleanup: Whether or not to clean up the temporary directory.
  :param str suffix: If specified, the directory name will end with this suffix.
  :param int permissions: If provided, sets the directory permissions to this mode.
  :param str prefix: If specified, the directory name will begin with this prefix;
    otherwise the stdlib tempfile default prefix is used.
  """
path = tempfile.mkdtemp(dir=root_dir, suffix=suffix, prefix=prefix)
try:
if permissions is not None:
os.chmod(path, permissions)
yield path
finally:
if cleanup:
shutil.rmtree(path, ignore_errors=True)
@contextmanager
def temporary_file_path(root_dir=None, cleanup=True, suffix='', permissions=None):
"""
A with-context that creates a temporary file and returns its path.
:API: public
  You may specify the following keyword args:
  :param str root_dir: The parent directory to create the temporary file.
  :param bool cleanup: Whether or not to clean up the temporary file.
  :param str suffix: If specified, the file name will end with this suffix.
  :param int permissions: If provided, sets the file to use these permissions.
  """
with temporary_file(root_dir, cleanup=cleanup, suffix=suffix, permissions=permissions) as fd:
fd.close()
yield fd.name
@contextmanager
def temporary_file(root_dir=None, cleanup=True, suffix='', permissions=None):
"""
A with-context that creates a temporary file and returns a writeable file descriptor to it.
You may specify the following keyword args:
:param str root_dir: The parent directory to create the temporary file.
:param bool cleanup: Whether or not to clean up the temporary file.
:param str suffix: If suffix is specified, the file name will end with that suffix.
Otherwise there will be no suffix.
mkstemp() does not put a dot between the file name and the suffix;
if you need one, put it at the beginning of suffix.
See :py:class:`tempfile.NamedTemporaryFile`.
:param int permissions: If provided, sets the file to use these permissions.
"""
with tempfile.NamedTemporaryFile(suffix=suffix, dir=root_dir, delete=False) as fd:
try:
if permissions is not None:
os.chmod(fd.name, permissions)
yield fd
finally:
if cleanup:
safe_delete(fd.name)
@contextmanager
def safe_file(path, suffix=None, cleanup=True):
"""A with-context that copies a file, and copies the copy back to the original file on success.
This is useful for doing work on a file but only changing its state on success.
:param str suffix: Use this suffix to create the copy. Otherwise use a random string.
:param bool cleanup: Whether or not to clean up the copy.
"""
safe_path = '{0}.{1}'.format(path, suffix or uuid.uuid4())
if os.path.exists(path):
shutil.copy(path, safe_path)
try:
yield safe_path
if cleanup:
shutil.move(safe_path, path)
else:
shutil.copy(safe_path, path)
finally:
if cleanup:
safe_delete(safe_path)
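# Illustrative usage (not part of the original module): work on a scratch copy and only
# promote it over the original if the block exits without raising.
#   with safe_file('BUILD') as scratch_path:
#     rewrite_build_file(scratch_path)   # hypothetical helper that edits the copy in place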
@contextmanager
def pushd(directory):
"""
A with-context that encapsulates pushd/popd.
"""
cwd = os.getcwd()
os.chdir(directory)
try:
yield directory
finally:
os.chdir(cwd)
@contextmanager
def open_zip(path_or_file, *args, **kwargs):
"""A with-context for zip files.
Passes through *args and **kwargs to zipfile.ZipFile.
:API: public
:param path_or_file: Full path to zip file.
:param args: Any extra args accepted by `zipfile.ZipFile`.
:param kwargs: Any extra keyword args accepted by `zipfile.ZipFile`.
:raises: `InvalidZipPath` if path_or_file is invalid.
:raises: `zipfile.BadZipfile` if zipfile.ZipFile cannot open a zip at path_or_file.
:returns: `class 'contextlib.GeneratorContextManager`.
"""
if not path_or_file:
raise InvalidZipPath('Invalid zip location: {}'.format(path_or_file))
allowZip64 = kwargs.pop('allowZip64', True)
try:
zf = zipfile.ZipFile(path_or_file, *args, allowZip64=allowZip64, **kwargs)
except zipfile.BadZipfile as bze:
# Use the realpath in order to follow symlinks back to the problem source file.
raise zipfile.BadZipfile("Bad Zipfile {0}: {1}".format(os.path.realpath(path_or_file), bze))
try:
yield zf
finally:
zf.close()
@contextmanager
def open_tar(path_or_file, *args, **kwargs):
"""
A with-context for tar files. Passes through positional and kwargs to tarfile.open.
If path_or_file is a file, caller must close it separately.
"""
(path, fileobj) = ((path_or_file, None) if isinstance(path_or_file, string_types)
else (None, path_or_file))
with closing(TarFile.open(path, *args, fileobj=fileobj, **kwargs)) as tar:
yield tar
class Timer(object):
"""Very basic with-context to time operations
Example usage:
>>> from pants.util.contextutil import Timer
>>> with Timer() as timer:
... time.sleep(2)
...
>>> timer.elapsed
2.0020849704742432
"""
def __init__(self, clock=time):
self._clock = clock
def __enter__(self):
self.start = self._clock.time()
self.finish = None
return self
@property
def elapsed(self):
if self.finish:
return self.finish - self.start
else:
return self._clock.time() - self.start
def __exit__(self, typ, val, traceback):
self.finish = self._clock.time()
@contextmanager
def exception_logging(logger, msg):
"""Provides exception logging via `logger.exception` for a given block of code.
:param logging.Logger logger: The `Logger` instance to use for logging.
:param string msg: The message to emit before `logger.exception` emits the traceback.
"""
try:
yield
except Exception:
logger.exception(msg)
raise
@contextmanager
def maybe_profiled(profile_path):
"""A profiling context manager.
:param string profile_path: The path to write profile information to. If `None`, this will no-op.
"""
if not profile_path:
yield
return
import cProfile
profiler = cProfile.Profile()
try:
profiler.enable()
yield
finally:
profiler.disable()
profiler.dump_stats(profile_path)
view_cmd = green('gprof2dot -f pstats {path} | dot -Tpng -o {path}.png && open {path}.png'
.format(path=profile_path))
logging.getLogger().info(
'Dumped profile data to: {}\nUse e.g. {} to render and view.'.format(profile_path, view_cmd)
)
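# Illustrative usage (not part of the original module): profiling only kicks in when a
# destination path is supplied, so callers can pass an optional flag straight through.
#   with maybe_profiled(options.profile_path):   # hypothetical options object; None disables profiling
#     run_goals()                                # hypothetical unit of work to profile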
class HardSystemExit(SystemExit):
"""A SystemExit subclass that incurs an os._exit() via special handling."""
@contextmanager
def hard_exit_handler():
"""An exit helper for the daemon/fork'd context that provides for deferred os._exit(0) calls."""
try:
yield
except HardSystemExit:
os._exit(0)
| UnrememberMe/pants | src/python/pants/util/contextutil.py | Python | apache-2.0 | 10,875 |
import os
import errno
import shutil
import sys
import sqlite3
def root_dir():
return os.path.dirname(os.path.realpath(os.path.join(sys.argv[0],'..')))
def make_dir_if_not_found(dir_path):
if os.path.isdir(dir_path) is not True:
os.makedirs(dir_path)
return None
def delete_dir(dir_path):
if os.path.isdir(dir_path):
shutil.rmtree(dir_path)
return True
def renew_dir(dir_path):
'''
False if user abort
True if dir (re)created and is empty
'''
user_response = delete_dir(dir_path)
if user_response is False:
return False
else:
make_dir_if_not_found(dir_path)
return True
def initialise_subdirs(subdir_names):
'''Appends subdir names to the root directory, makes subdirs if they don't exist, and returns their absolute paths'''
dir_paths = tuple([os.path.normpath(os.path.join(root_dir(), x)) for x in subdir_names])
for dir_path in dir_paths:
make_dir_if_not_found(dir_path)
return dir_paths
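# Illustrative usage (not part of the original module): create (if needed) and fetch the
# absolute paths of two subdirectories under the project root.
#   assets_dir, cache_dir = initialise_subdirs(['assets', 'cache'])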
def get_db_path():
return os.path.join(root_dir(), 'assets', 'network.db')
def lookup_region_key(name):
    conn = sqlite3.connect(get_db_path())
    try:
        region_cursor = conn.cursor()
        region_cursor.execute('SELECT key, name FROM regions WHERE name = ?', (name.lower(),))
        region = region_cursor.fetchone()
    finally:
        conn.close()
    if region:
        return region
    else:
        print '!', name, 'not found in region database'
        raise ValueError('region not found in database: %s' % name)
| tommilligan/RainWorld-Map | src/common.py | Python | gpl-2.0 | 1,466 |