"""Sensor for Supervisord process status."""
import logging
import xmlrpc.client
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import CONF_URL
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ATTR_DESCRIPTION = "description"
ATTR_GROUP = "group"
DEFAULT_URL = "http://localhost:9001/RPC2"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_URL, default=DEFAULT_URL): cv.url}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Supervisord platform."""
url = config.get(CONF_URL)
try:
supervisor_server = xmlrpc.client.ServerProxy(url)
processes = supervisor_server.supervisor.getAllProcessInfo()
except ConnectionRefusedError:
_LOGGER.error("Could not connect to Supervisord")
return False
add_entities(
[SupervisorProcessSensor(info, supervisor_server) for info in processes], True
)
class SupervisorProcessSensor(SensorEntity):
"""Representation of a supervisor-monitored process."""
def __init__(self, info, server):
"""Initialize the sensor."""
self._info = info
self._server = server
self._available = True
@property
def name(self):
"""Return the name of the sensor."""
return self._info.get("name")
@property
def state(self):
"""Return the state of the sensor."""
return self._info.get("statename")
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self._available
@property
def extra_state_attributes(self):
"""Return the state attributes."""
return {
ATTR_DESCRIPTION: self._info.get("description"),
ATTR_GROUP: self._info.get("group"),
}
def update(self):
"""Update device state."""
try:
self._info = self._server.supervisor.getProcessInfo(
self._info.get("group") + ":" + self._info.get("name")
)
self._available = True
except ConnectionRefusedError:
_LOGGER.warning("Supervisord not available")
self._available = False
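For reference, the XML-RPC calls this platform relies on can be exercised directly against a running supervisord instance. A minimal sketch, assuming supervisord is listening on the default http://localhost:9001/RPC2 endpoint:

import xmlrpc.client

# Query supervisord over XML-RPC the same way the sensor platform does.
server = xmlrpc.client.ServerProxy("http://localhost:9001/RPC2")
for info in server.supervisor.getAllProcessInfo():
    # Each entry carries the fields the sensor exposes: name, group,
    # statename and a human-readable description.
    print(info["group"], info["name"], info["statename"], info["description"])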
from app.schemas import filtration
from typing import Any
from uuid import UUID, uuid4
import celery
from datetime import datetime, timedelta
from app import crud, models, schemas
from app.celery import celery as celery_app
from app.api import deps
from app.core.config import settings
from app.utils import auth
from app.core import tasks
from fastapi import APIRouter, Depends, HTTPException, Body, Query
from sqlalchemy.orm import Session
from fastapi_utils.cbv import cbv
from fastapi_utils.inferring_router import InferringRouter
from fastapi_utils.tasks import repeat_every
from app.api.routes.base import BaseAuthCBV
from app.db.session import DatabaseSession
router = InferringRouter()
# @router.on_event("startup")
# @repeat_every(seconds=60, raise_exceptions=True)
# def scan_queues_on_servers() -> None:
# db = DatabaseSession()
# for plot_queue in crud.plot_queue.get_multi(db)[1]:
# tasks.scan_plotting.delay(plot_queue.id)
# db.close()
@cbv(router)
class TransferCBV(BaseAuthCBV):
@router.post("/")
def create_transfer(self, data: schemas.TransferCreate) -> schemas.TransferReturn:
plot = crud.plot.get(self.db, id=data.plot_id)
if plot is None:
raise HTTPException(404, "Plot with such id is not found")
if plot.status in [schemas.PlotStatus.PLOTING, schemas.PlotStatus.PENDING]:
raise HTTPException(403, "Can not transfer plotting and pending plots")
start_dir = plot.located_directory
dest_dir = crud.directory.get(self.db, id=data.destination_directory_id)
if dest_dir is None:
raise HTTPException(404, "Directory with such id is not found")
data_extended = schemas.TransferCreateExtended(
**data.dict(), starting_directory_id=start_dir.id
)
transfer = crud.transfer.create(self.db, obj_in=data_extended)
return schemas.TransferReturn.from_orm(transfer)
@router.get("/")
def get_transfers_table(
self,
filtration: schemas.FilterData[models.Transfer] = Depends(
deps.get_filtration_data(models.Transfer)
),
) -> schemas.Table[schemas.TransferReturn]:
amount, items = crud.transfer.get_multi(self.db, filtration=filtration)
return schemas.Table[schemas.TransferReturn](amount=amount, items=items)
import re
import time
import json
import numpy as np
from collections import Counter
from utilities.utilities import VOWELS, LETTERS, get_vowel_count, get_available_words, log_list
start = time.time()
# Open the answer word list
with open('data/answer-word-list.txt', mode='r') as f:
answer_word_list = f.read().split('\n')
# Open the list of valid guess words
with open('data/valid-word-list.txt', mode='r') as f:
valid_word_list = f.read().split('\n')
valid_word_list += answer_word_list
# can only use each letter once
word_list = [word for word in valid_word_list if len(set(word)) == 5]
log_list(word_list, "word_list")
result = []
for word_1 in word_list:
word_list_for_word_2 = get_available_words(
word_list, list(word_1))
for i_2, word_2 in enumerate(word_list_for_word_2):
word_list_for_word_3 = get_available_words(
word_list_for_word_2[i_2+1:], list(word_2))
for i_3, word_3 in enumerate(word_list_for_word_3):
word_list_for_word_4 = get_available_words(
word_list_for_word_3[i_3+1:], list(word_3))
for i_4, word_4 in enumerate(word_list_for_word_4):
word_list_for_word_5 = get_available_words(
word_list_for_word_4[i_4+1:], list(word_4))
print([word_1, word_2, word_3, word_4])
                for word_5 in word_list_for_word_5:
words = [word_1, word_2, word_3, word_4, word_5]
result.append(sorted(words))
log_list(result, "results are")
elapsed_time = time.time() - start
print("elapsed_time: {0}".format(elapsed_time))
with open('power_quintet.txt', 'w') as f:
f.write(json.dumps(result))
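The search above relies on get_available_words from the project's utilities module, whose implementation is not shown here. A minimal sketch of the filtering it presumably performs, i.e. keeping only candidate words that reuse none of the letters already taken (the function name below is hypothetical):

# Hypothetical sketch of the assumed filtering: drop any word that shares
# a letter with the letters already used.
def get_available_words_sketch(words, used_letters):
    used = set(used_letters)
    return [word for word in words if not used & set(word)]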
"""Support gathering system information of hosts which are running glances."""
import logging
from homeassistant.const import CONF_NAME, STATE_UNAVAILABLE
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .const import DATA_UPDATED, DOMAIN, SENSOR_TYPES
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Glances sensors."""
client = hass.data[DOMAIN][config_entry.entry_id]
name = config_entry.data[CONF_NAME]
dev = []
for sensor_type, sensor_details in SENSOR_TYPES.items():
        if sensor_details[0] not in client.api.data:
continue
if sensor_details[0] in client.api.data:
if sensor_details[0] == "fs":
# fs will provide a list of disks attached
for disk in client.api.data[sensor_details[0]]:
dev.append(
GlancesSensor(
client,
name,
disk["mnt_point"],
SENSOR_TYPES[sensor_type][1],
sensor_type,
SENSOR_TYPES[sensor_type],
)
)
elif sensor_details[0] == "sensors":
# sensors will provide temp for different devices
for sensor in client.api.data[sensor_details[0]]:
dev.append(
GlancesSensor(
client,
name,
sensor["label"],
SENSOR_TYPES[sensor_type][1],
sensor_type,
SENSOR_TYPES[sensor_type],
)
)
elif client.api.data[sensor_details[0]]:
dev.append(
GlancesSensor(
client,
name,
"",
SENSOR_TYPES[sensor_type][1],
sensor_type,
SENSOR_TYPES[sensor_type],
)
)
async_add_entities(dev, True)
class GlancesSensor(Entity):
"""Implementation of a Glances sensor."""
def __init__(
self,
glances_data,
name,
sensor_name_prefix,
sensor_name_suffix,
sensor_type,
sensor_details,
):
"""Initialize the sensor."""
self.glances_data = glances_data
self._sensor_name_prefix = sensor_name_prefix
self._sensor_name_suffix = sensor_name_suffix
self._name = name
self.type = sensor_type
self._state = None
self.sensor_details = sensor_details
self.unsub_update = None
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name} {self._sensor_name_prefix} {self._sensor_name_suffix}"
@property
def unique_id(self):
"""Set unique_id for sensor."""
return f"{self.glances_data.host}-{self.name}"
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self.sensor_details[3]
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self.sensor_details[2]
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self.glances_data.available
@property
def state(self):
"""Return the state of the resources."""
return self._state
@property
def should_poll(self):
"""Return the polling requirement for this sensor."""
return False
async def async_added_to_hass(self):
"""Handle entity which will be added."""
self.unsub_update = async_dispatcher_connect(
self.hass, DATA_UPDATED, self._schedule_immediate_update
)
@callback
def _schedule_immediate_update(self):
self.async_schedule_update_ha_state(True)
    async def async_will_remove_from_hass(self):
"""Unsubscribe from update dispatcher."""
if self.unsub_update:
self.unsub_update()
self.unsub_update = None
async def async_update(self):
"""Get the latest data from REST API."""
value = self.glances_data.api.data
if value is None:
return
if value is not None:
if self.sensor_details[0] == "fs":
for var in value["fs"]:
if var["mnt_point"] == self._sensor_name_prefix:
disk = var
break
if self.type == "disk_use_percent":
self._state = disk["percent"]
elif self.type == "disk_use":
self._state = round(disk["used"] / 1024 ** 3, 1)
elif self.type == "disk_free":
try:
self._state = round(disk["free"] / 1024 ** 3, 1)
except KeyError:
self._state = round(
(disk["size"] - disk["used"]) / 1024 ** 3, 1,
)
elif self.type == "sensor_temp":
for sensor in value["sensors"]:
if sensor["label"] == self._sensor_name_prefix:
self._state = sensor["value"]
break
elif self.type == "memory_use_percent":
self._state = value["mem"]["percent"]
elif self.type == "memory_use":
self._state = round(value["mem"]["used"] / 1024 ** 2, 1)
elif self.type == "memory_free":
self._state = round(value["mem"]["free"] / 1024 ** 2, 1)
elif self.type == "swap_use_percent":
self._state = value["memswap"]["percent"]
elif self.type == "swap_use":
self._state = round(value["memswap"]["used"] / 1024 ** 3, 1)
elif self.type == "swap_free":
self._state = round(value["memswap"]["free"] / 1024 ** 3, 1)
elif self.type == "processor_load":
# Windows systems don't provide load details
try:
self._state = value["load"]["min15"]
except KeyError:
self._state = value["cpu"]["total"]
elif self.type == "process_running":
self._state = value["processcount"]["running"]
elif self.type == "process_total":
self._state = value["processcount"]["total"]
elif self.type == "process_thread":
self._state = value["processcount"]["thread"]
elif self.type == "process_sleeping":
self._state = value["processcount"]["sleeping"]
elif self.type == "cpu_use_percent":
self._state = value["quicklook"]["cpu"]
elif self.type == "docker_active":
count = 0
try:
for container in value["docker"]["containers"]:
if (
container["Status"] == "running"
or "Up" in container["Status"]
):
count += 1
self._state = count
except KeyError:
self._state = count
elif self.type == "docker_cpu_use":
cpu_use = 0.0
try:
for container in value["docker"]["containers"]:
if (
container["Status"] == "running"
or "Up" in container["Status"]
):
cpu_use += container["cpu"]["total"]
self._state = round(cpu_use, 1)
except KeyError:
self._state = STATE_UNAVAILABLE
elif self.type == "docker_memory_use":
mem_use = 0.0
try:
for container in value["docker"]["containers"]:
if (
container["Status"] == "running"
or "Up" in container["Status"]
):
mem_use += container["memory"]["usage"]
self._state = round(mem_use / 1024 ** 2, 1)
except KeyError:
self._state = STATE_UNAVAILABLE
import logging
import os
import tempfile
import threading
from contextlib import contextmanager
from typing import Dict
from funcy import retry, wrap_with
from dvc.exceptions import (
FileMissingError,
NoOutputInExternalRepoError,
NoRemoteInExternalRepoError,
NotDvcRepoError,
OutputNotFoundError,
PathMissingError,
)
from dvc.repo import Repo
from dvc.utils import relpath
logger = logging.getLogger(__name__)
@contextmanager
def external_repo(
url, rev=None, for_write=False, cache_dir=None, cache_types=None, **kwargs
):
from dvc.config import NoRemoteError
from dvc.scm.git import Git
logger.debug("Creating external repo %s@%s", url, rev)
path = _cached_clone(url, rev, for_write=for_write)
# Local HEAD points to the tip of whatever branch we first cloned from
# (which may not be the default branch), use origin/HEAD here to get
# the tip of the default branch
rev = rev or "refs/remotes/origin/HEAD"
cache_config = {
"cache": {
"dir": cache_dir or _get_cache_dir(url),
"type": cache_types,
}
}
config = _get_remote_config(url) if os.path.isdir(url) else {}
config.update(cache_config)
def make_repo(path, **_kwargs):
_config = cache_config.copy()
if os.path.isdir(url):
rel = os.path.relpath(path, _kwargs["scm"].root_dir)
repo_path = os.path.join(url, rel)
_config.update(_get_remote_config(repo_path))
return Repo(path, config=_config, **_kwargs)
root_dir = path if for_write else os.path.realpath(path)
repo_kwargs = dict(
root_dir=root_dir,
url=url,
scm=None if for_write else Git(root_dir),
rev=None if for_write else rev,
config=config,
repo_factory=make_repo,
**kwargs,
)
if "subrepos" not in repo_kwargs:
repo_kwargs["subrepos"] = True
if "uninitialized" not in repo_kwargs:
repo_kwargs["uninitialized"] = True
repo = Repo(**repo_kwargs)
try:
yield repo
except NoRemoteError as exc:
raise NoRemoteInExternalRepoError(url) from exc
except OutputNotFoundError as exc:
if exc.repo is repo:
raise NoOutputInExternalRepoError(
exc.output, repo.root_dir, url
) from exc
raise
except FileMissingError as exc:
raise PathMissingError(exc.path, url) from exc
finally:
repo.close()
if for_write:
_remove(path)
CLONES: Dict[str, str] = {}
CACHE_DIRS: Dict[str, str] = {}
@wrap_with(threading.Lock())
def _get_cache_dir(url):
try:
cache_dir = CACHE_DIRS[url]
except KeyError:
cache_dir = CACHE_DIRS[url] = tempfile.mkdtemp("dvc-cache")
return cache_dir
def clean_repos():
# Outside code should not see cache while we are removing
paths = [path for path, _ in CLONES.values()] + list(CACHE_DIRS.values())
CLONES.clear()
CACHE_DIRS.clear()
for path in paths:
_remove(path)
def _get_remote_config(url):
try:
repo = Repo(url)
except NotDvcRepoError:
return {}
try:
name = repo.config["core"].get("remote")
if not name:
# Fill the empty upstream entry with a new remote pointing to the
# original repo's cache location.
name = "auto-generated-upstream"
return {
"core": {"remote": name},
"remote": {name: {"url": repo.odb.local.cache_dir}},
}
# Use original remote to make sure that we are using correct url,
# credential paths, etc if they are relative to the config location.
return {"remote": {name: repo.config["remote"][name]}}
finally:
repo.close()
def _cached_clone(url, rev, for_write=False):
"""Clone an external git repo to a temporary directory.
Returns the path to a local temporary directory with the specified
    revision checked out. If for_write is set, reusing this dir via the
    cache is prevented.
"""
from distutils.dir_util import copy_tree
# even if we have already cloned this repo, we may need to
# fetch/fast-forward to get specified rev
clone_path, shallow = _clone_default_branch(url, rev, for_write=for_write)
    if not for_write and url in CLONES:
return CLONES[url][0]
# Copy to a new dir to keep the clone clean
repo_path = tempfile.mkdtemp("dvc-erepo")
logger.debug("erepo: making a copy of %s clone", url)
copy_tree(clone_path, repo_path)
# Check out the specified revision
if for_write:
_git_checkout(repo_path, rev)
else:
CLONES[url] = (repo_path, shallow)
return repo_path
@wrap_with(threading.Lock())
def _clone_default_branch(url, rev, for_write=False):
"""Get or create a clean clone of the url.
    The clone is updated with git pull unless rev is a known sha.
"""
from dvc.scm.git import Git
clone_path, shallow = CLONES.get(url, (None, False))
git = None
try:
if clone_path:
git = Git(clone_path)
# Do not pull for known shas, branches and tags might move
if not Git.is_sha(rev) or not git.has_rev(rev):
if shallow:
# If we are missing a rev in a shallow clone, fallback to
# a full (unshallowed) clone. Since fetching specific rev
# SHAs is only available in certain git versions, if we
                    # need to reference multiple specific revs for a
# given repo URL it is easier/safer for us to work with
# full clones in this case.
logger.debug("erepo: unshallowing clone for '%s'", url)
_unshallow(git)
shallow = False
CLONES[url] = (clone_path, shallow)
else:
logger.debug("erepo: git pull '%s'", url)
git.pull()
else:
logger.debug("erepo: git clone '%s' to a temporary dir", url)
clone_path = tempfile.mkdtemp("dvc-clone")
if not for_write and rev and not Git.is_sha(rev):
# If rev is a tag or branch name try shallow clone first
from dvc.scm.base import CloneError
try:
git = Git.clone(url, clone_path, shallow_branch=rev)
shallow = True
logger.debug(
"erepo: using shallow clone for branch '%s'", rev
)
except CloneError:
pass
if not git:
git = Git.clone(url, clone_path)
shallow = False
CLONES[url] = (clone_path, shallow)
finally:
if git:
git.close()
return clone_path, shallow
def _unshallow(git):
if git.gitpython.repo.head.is_detached:
# If this is a detached head (i.e. we shallow cloned a tag) switch to
# the default branch
origin_refs = git.gitpython.repo.remotes["origin"].refs
ref = origin_refs["HEAD"].reference
branch_name = ref.name.split("/")[-1]
branch = git.gitpython.repo.create_head(branch_name, ref)
branch.set_tracking_branch(ref)
branch.checkout()
git.pull(unshallow=True)
def _git_checkout(repo_path, rev):
from dvc.scm.git import Git
logger.debug("erepo: git checkout %s@%s", repo_path, rev)
git = Git(repo_path)
try:
git.checkout(rev)
finally:
git.close()
def _remove(path):
from dvc.utils.fs import remove
if os.name == "nt":
# git.exe may hang for a while not permitting to remove temp dir
os_retry = retry(5, errors=OSError, timeout=0.1)
try:
os_retry(remove)(path)
except PermissionError:
logger.warning(
"Failed to remove '%s'", relpath(path), exc_info=True
)
else:
remove(path)
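Since external_repo is a context manager, callers typically wrap it in a with block. A minimal usage sketch; the URL and revision below are placeholder values, not part of this module:

def example_external_repo_usage():
    # The yielded object is a regular dvc Repo built on a cached clone of the
    # remote; it is closed (and the temporary copy removed for writes) on exit.
    with external_repo("https://example.com/some/repo.git", rev="main") as repo:
        print(repo.root_dir)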
from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
from .views import AuthorSignupView, AuthorList, AuthorDetailView
urlpatterns = [
url(r'^$', AuthorList.as_view(), name='author-list'),
    url(r'^(?P<pk>\d+)/$', AuthorDetailView.as_view(), name='author-rud'),
    url(r'^signup/$', AuthorSignupView.as_view(), name='author-signup'),
]
from unittest import TestCase
from unittest.mock import Mock, call
import pandas as pd
from sdv.metadata import Metadata
from sdv.modeler import Modeler
from sdv.models.base import SDVModel
from sdv.models.copulas import GaussianCopula
class TestModeler(TestCase):
def test___init__default(self):
"""Test create new Modeler instance with default values"""
# Run
modeler = Modeler('test')
# Asserts
assert modeler.models == dict()
assert modeler.metadata == 'test'
assert modeler.model == GaussianCopula
assert modeler.model_kwargs == dict()
def test___init__with_arguments(self):
# Run
model = Mock()
modeler = Modeler({'some': 'metadata'}, model=model, model_kwargs={'some': 'kwargs'})
# Asserts
assert modeler.models == dict()
assert modeler.metadata == {'some': 'metadata'}
assert modeler.model == model
assert modeler.model_kwargs == {'some': 'kwargs'}
def test__get_extensions(self):
"""Test get list of extensions from childs"""
# Setup
model = Mock(spec=SDVModel)
model.return_value = model
model.get_parameters.side_effect = [
{'model': 'data 1'},
{'model': 'data 2'},
{'model': 'data 3'}
]
modeler = Mock(spec=Modeler)
modeler.model = model
modeler.model_kwargs = dict()
modeler.metadata = Mock(spec=Metadata)
# Run
child_table = pd.DataFrame({'foo': ['aaa', 'bbb', 'ccc']})
result = Modeler._get_extension(modeler, 'some_name', child_table, 'foo')
# Asserts
expected = pd.DataFrame({
'__some_name__model': ['data 1', 'data 2', 'data 3'],
'__some_name__child_rows': [1, 1, 1]
}, index=['aaa', 'bbb', 'ccc'])
pd.testing.assert_frame_equal(result, expected)
assert model.get_parameters.call_count == 3
def test_cpa_with_tables_no_primary_key(self):
"""Test CPA with tables and no primary key."""
# Setup
modeler = Mock(spec=Modeler)
modeler.metadata = Mock(spec=Metadata)
modeler.model = Mock(spec=SDVModel)
modeler.model_kwargs = dict()
modeler.models = dict()
modeler.table_sizes = {'data': 5}
modeler.metadata.transform.return_value = pd.DataFrame({'data': [1, 2, 3]})
modeler.metadata.get_primary_key.return_value = None
# Run
tables = {'test': pd.DataFrame({'data': ['a', 'b', 'c']})}
result = Modeler.cpa(modeler, 'test', tables)
# Asserts
expected = pd.DataFrame({'data': [1, 2, 3]})
expected_transform_call = pd.DataFrame({'data': ['a', 'b', 'c']})
assert modeler.metadata.load_table.call_count == 0
assert modeler.metadata.transform.call_args[0][0] == 'test'
pd.testing.assert_frame_equal(
modeler.metadata.transform.call_args[0][1],
expected_transform_call
)
pd.testing.assert_frame_equal(result, expected)
def test_model_database(self):
"""Test model using RCPA"""
# Setup
def rcpa_side_effect(table_name, tables):
tables[table_name] = table_name
metadata_table_names = ['foo', 'bar', 'tar']
metadata_parents = [None, 'bar_parent', None]
modeler = Mock()
modeler.metadata.get_tables.return_value = metadata_table_names
modeler.metadata.get_parents.side_effect = metadata_parents
modeler.rcpa.side_effect = rcpa_side_effect
modeler.models = dict()
# Run
Modeler.model_database(modeler)
# Asserts
expected_metadata_parents_call_count = 3
expected_metadata_parents_call = [call('foo'), call('bar'), call('tar')]
assert modeler.metadata.get_parents.call_count == expected_metadata_parents_call_count
assert modeler.metadata.get_parents.call_args_list == expected_metadata_parents_call
test = {
'name': 'q2_1_3',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
>>> np.isclose(distance_from_batman_returns('titanic'), 0.0023550202650824965)
True
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'
}
]
}
import unittest
from typing import NoReturn
import marshmallow
import urllib3
import vaa
import deal
import pytest
class TestPreDeal:
@pytest.mark.parametrize('correct,incorrect', [(1, -1), (2, -2), (3, -3), (5, -5), (7, -7), (11, -11)])
def test_pre_contract_fulfilled(self, correct, incorrect):
func = deal.pre(lambda x: x > 0)(lambda x: x)
assert func(correct) == correct
with pytest.raises(deal.PreContractError):
func(incorrect)
@pytest.mark.parametrize('correct,incorrect_min,incorrect_max',
[(1, -1, 20), (2, -2, 21), (3, -3, 22), (5, -5, 23), (7, -7, 24), (9, -11, 25)])
def test_chain_all_contracts_fulfilled(self, correct, incorrect_min, incorrect_max):
func = deal.pre(lambda x: x < 10)(lambda x: x)
func = deal.pre(lambda x: x > 0)(func)
assert func(correct) == correct
with pytest.raises(deal.PreContractError):
func(incorrect_min)
with pytest.raises(deal.PreContractError):
func(incorrect_max)
def test_correct_exceptions_raised_on_contract_fail(self):
func = deal.pre(lambda x: x > 0)(lambda x: x)
with pytest.raises(deal.PreContractError):
func(-2)
func = deal.pre(lambda x: x > 0, message='TEST')(lambda x: x)
try:
func(-2)
except AssertionError as e:
assert e.args[0] == 'TEST'
func = deal.pre(lambda x: x > 0, exception=NameError)(lambda x: x)
with pytest.raises(NameError):
func(-2)
func = deal.pre(lambda x: x > 0, exception=NameError('TEST'))(lambda x: x)
with pytest.raises(NameError):
func(-2)
try:
func(-2)
except NameError as e:
assert e.args[0] == 'TEST'
func = deal.pre(lambda x: x > 0, message='TEST', exception=NameError)(lambda x: x)
with pytest.raises(NameError):
func(-2)
try:
func(-2)
except NameError as e:
assert e.args[0] == 'TEST'
def test_raise_error_with_param_on_contract_failure(self):
func = deal.pre(lambda x: x > 0 or 'TEST')(lambda x: x)
assert func(4) == 4
with pytest.raises(deal.PreContractError):
func(-2)
try:
func(-2)
except deal.PreContractError as e:
assert e.args[0] == 'TEST'
def test_method_decoration_name_is_correct(self):
@deal.pre(lambda x: x > 0)
def some_function(x):
return x
assert some_function.__name__ == 'some_function'
def test_class_method_decorator_raises_error_on_contract_fail(self):
class Class:
y = 7
@deal.pre(lambda self, x: x > 0)
def method(self, x):
return x * 2
@deal.pre(lambda self, x: x > 0)
def method2(self, y):
return self.y
assert Class().method(2) == 4
assert Class().method2(2) == 7
with pytest.raises(deal.PreContractError):
Class().method(-2)
with pytest.raises(deal.PreContractError):
Class().method2(-2)
# ignored test
def _test_validator(self, validator):
func = deal.pre(validator)(lambda x: x)
assert func(4) == 4
with pytest.raises(deal.PreContractError):
func(-2)
try:
func(-2)
except deal.PreContractError as e:
assert e.args[0] == 'TEST'
class TestPostDeal:
def test_return_value_fulfils_contract(self):
func = deal.post(lambda x: x > 0)(lambda x: -x)
assert func(-4) == 4
with pytest.raises(deal.PostContractError):
func(4)
class TestInvDeal:
def test_setting_object_attribute_fulfills_contract(self):
@deal.inv(lambda obj: obj.x > 0)
class A:
x = 2
a = A()
a.x = 4
with pytest.raises(deal.InvContractError):
a.x = -2
def test_setting_wrong_args_by_method_raises_error(self):
@deal.inv(lambda obj: obj.x > 0)
class A:
x = 2
def f(self, x):
self.x = x
a = A()
a.f(4)
with pytest.raises(deal.InvContractError):
a.f(-2)
def test_chain_contracts_both_fulfill(self):
@deal.inv(lambda obj: obj.x > 0)
@deal.inv(lambda obj: obj.x < 10)
class A:
x = 2
a = A()
a.x = 4
with pytest.raises(deal.InvContractError):
a.x = -2
with pytest.raises(deal.InvContractError):
a.x = 20
def test_patched_invariants_instance(self):
class A:
x = 2
PatchedA = deal.inv(lambda obj: obj.x > 0)(A) # noQA
a = PatchedA()
assert isinstance(a, PatchedA)
assert isinstance(a, A)
PatchedA2 = deal.inv(lambda obj: obj.x > 0)(PatchedA) # noQA
a = PatchedA2()
assert isinstance(a, PatchedA)
assert isinstance(a, PatchedA2)
assert isinstance(a, A)
assert a.__class__.__name__.count('Invarianted') == 1
class MarshmallowSchemeTests(unittest.TestCase):
def setUp(self):
class _Scheme(marshmallow.Schema):
name = marshmallow.fields.Str()
self.Scheme = vaa.marshmallow(_Scheme)
def test_scheme_string_validation_args_correct(self):
@deal.pre(self.Scheme)
def func(name):
return name * 2
assert func('Chris') == 'ChrisChris'
with pytest.raises(deal.PreContractError):
func(123)
try:
func(123)
except deal.PreContractError as e:
assert e.args[0] == {'name': ['Not a valid string.']}
def test_method_chain_decorator_with_scheme_is_fulfilled(self):
@deal.pre(self.Scheme)
@deal.pre(lambda name: name != 'Oleg')
def func(name):
return name * 2
assert func('Chris') == 'ChrisChris'
with pytest.raises(deal.PreContractError):
func(123)
with pytest.raises(deal.PreContractError):
func('Oleg')
def test_scheme_contract_is_satisfied_when_setting_arg(self):
@deal.inv(self.Scheme)
class User:
name = ''
user = User()
user.name = 'Chris'
with pytest.raises(deal.InvContractError):
user.name = 123
try:
user.name = 123
except deal.InvContractError as e:
assert e.args[0] == {'name': ['Not a valid string.']}
def test_scheme_contract_is_satisfied_within_chain(self):
@deal.inv(lambda user: user.name != 'Oleg')
@deal.inv(self.Scheme)
@deal.inv(lambda user: user.name != 'Chris')
class User:
name = ''
user = User()
user.name = 'Gram'
user = User()
with pytest.raises(deal.InvContractError):
user.name = 'Oleg'
user = User()
with pytest.raises(deal.InvContractError):
user.name = 123
user = User()
with pytest.raises(deal.InvContractError):
user.name = 'Chris'
def test_scheme_contract_is_satisfied_when_passing_args(self):
@deal.pre(self.Scheme)
def func(name):
return name * 2
assert func('Chris') == 'ChrisChris'
assert func(name='Chris') == 'ChrisChris'
@deal.pre(self.Scheme)
def func(**kwargs):
return kwargs['name'] * 3
assert func(name='Chris') == 'ChrisChrisChris'
@deal.pre(self.Scheme)
def func(name='Max'):
return name * 2
assert func() == 'MaxMax'
class TestDefaultScheme(MarshmallowSchemeTests):
def setUp(self):
class MyScheme(deal.Scheme):
def is_valid(self):
if not isinstance(self.data['name'], str):
self.errors = {'name': ['Not a valid string.']}
return False
return True
self.Scheme = MyScheme
class TestRaises:
def test_raises_expects_function_to_raise_error(self):
func = deal.raises(ZeroDivisionError)(lambda x: 1 / x)
with pytest.raises(ZeroDivisionError):
func(0)
func(2)
func = deal.raises(KeyError)(lambda x: 1 / x)
with pytest.raises(deal.RaisesContractError):
func(0)
    def test_raises_doesnt_override_another_contract(self):
@deal.raises(ZeroDivisionError)
@deal.offline
def func(do, number):
if do:
http = urllib3.PoolManager()
http.request('GET', 'http://httpbin.org/robots.txt')
1 / number
func(False, 1)
with pytest.raises(deal.OfflineContractError):
func(True, 1)
with pytest.raises(ZeroDivisionError):
func(False, 0)
class TestOffline:
def test_network_request_in_offline_raises_exception(self):
@deal.offline
def func(do):
if do:
http = urllib3.PoolManager()
http.request('GET', 'http://httpbin.org/robots.txt')
func(False)
with pytest.raises(deal.OfflineContractError):
func(True)
def test_network_request_in_offline_and_raises_specified_exception(self):
@deal.offline(exception=KeyError)
def func(do):
if do:
http = urllib3.PoolManager()
http.request('GET', 'http://httpbin.org/robots.txt')
func(False)
with pytest.raises(KeyError):
func(True)
class TestSilent:
def test_silent_contract_not_allow_print(self):
@deal.silent
def func(msg):
if msg:
print(msg)
func(None)
with pytest.raises(deal.SilentContractError):
func('bad')
class TestChain:
def test_chained_contract_decorator(self):
@deal.chain(deal.silent, deal.offline)
def func(msg, do):
if msg:
print(msg)
if do:
http = urllib3.PoolManager()
http.request('GET', 'http://httpbin.org/robots.txt')
func(False, False)
with pytest.raises(deal.SilentContractError):
func(True, False)
with pytest.raises(deal.OfflineContractError):
func(False, True)
class TestState:
def setUp(self):
deal.reset()
def tearDown(self):
deal.reset()
def test_contract_state_switch_custom_param(self):
func = deal.pre(lambda x: x > 0, debug=True)(lambda x: x * 2)
deal.switch(debug=False)
func(-2)
deal.switch(debug=True)
with pytest.raises(deal.PreContractError):
func(-2)
def test_contract_state_switch_default_param(self):
func = deal.pre(lambda x: x > 0)(lambda x: x * 2)
deal.switch(main=False)
func(-2)
deal.switch(main=True)
with pytest.raises(deal.PreContractError):
func(-2)
class TestEnsure:
    def test_parameters_and_result_fulfill_contract(self):
@deal.ensure(lambda a, b, result: a > 0 and b > 0 and result != 'same number')
def func(a, b):
if a == b:
return 'same number'
else:
return 'different numbers'
assert func(1, 2) == 'different numbers'
with pytest.raises(deal.PostContractError):
func(0, 1)
with pytest.raises(deal.PostContractError):
func(1, 0)
with pytest.raises(deal.PostContractError):
func(1, 1)
class CaseTest(unittest.TestCase):
def setUp(self):
@deal.raises(ZeroDivisionError)
@deal.pre(lambda a, b: a > 0 and b > 0)
def div(a: int, b: int) -> float:
assert isinstance(a, int)
assert isinstance(b, int)
assert a > 0
assert b > 0
return a / b
self.func = div
def test_count(self):
for count in (1, 10, 20, 50):
cases = deal.cases(self.func, count=count)
assert len(list(cases)) == count
def test_params_detected(self):
for case in deal.cases(self.func, count=10):
assert set(case.kwargs) == {'a', 'b'}
def test_params_type(self):
for case in deal.cases(self.func, count=10):
assert type(case.kwargs['a']) is int
assert type(case.kwargs['b']) is int
def test_params_ok_with_excs(self):
results = []
for case in deal.cases(self.func, count=20):
result = case()
results.append(result)
        assert any(r is not NoReturn for r in results), 'exception occurred on every run'
        assert any(r is NoReturn for r in results), 'no exception occurred'
def test_return_type_checks(self):
def div(a: int, b: int):
return 1
for case in deal.cases(div, count=20):
case()
def div(a: int, b: int) -> str:
return 1
with pytest.raises(TypeError):
case = next(iter(deal.cases(div, count=20)))
case()
def test_explicit_kwargs(self):
def div(a: int, b: int):
assert b == 4
for case in deal.cases(div, kwargs=dict(b=4), count=20):
case()
if __name__ == '__main__':
pytest.main(['tests.py'])
"""Class client for atome protocol."""
import json
import logging
import requests
import simplejson
from fake_useragent import UserAgent
# export const
DAILY_PERIOD_TYPE = "day"
WEEKLY_PERIOD_TYPE = "week"
MONTHLY_PERIOD_TYPE = "month"
YEARLY_PERIOD_TYPE = "year"
# internal const
COOKIE_NAME = "PHPSESSID"
API_BASE_URI = "https://esoftlink.esoftthings.com"
API_ENDPOINT_LOGIN = "/api/user/login.json"
API_ENDPOINT_LIVE = "/measure/live.json"
API_ENDPOINT_CONSUMPTION = "/consumption.json"
LOGIN_URL = API_BASE_URI + API_ENDPOINT_LOGIN
DEFAULT_TIMEOUT = 10
MAX_RETRIES = 3
_LOGGER = logging.getLogger(__name__)
class PyAtomeError(Exception):
"""Exception class."""
pass
class AtomeClient(object):
"""The client class."""
def __init__(
self, username, password, atome_linky_number=1, session=None, timeout=None
):
"""Initialize the client object."""
self.username = username
self.password = password
self._user_id = None
self._user_reference = None
self._session = session
self._data = {}
self._timeout = timeout
# internal array start from 0 and not 1. Shift by 1.
self._atome_linky_number = int(atome_linky_number) - 1
def login(self):
"""Set http session."""
if self._session is None:
self._session = requests.session()
# adding fake user-agent header
self._session.headers.update({"User-agent": str(UserAgent().random)})
return self._login()
def _login(self):
"""Login to Atome's API."""
error_flag = False
payload = {"email": self.username, "plainPassword": self.password}
try:
req = self._session.post(
LOGIN_URL,
json=payload,
headers={"content-type": "application/json"},
timeout=self._timeout,
)
except OSError:
_LOGGER.debug("Can not login to API")
error_flag = True
if error_flag:
return None
try:
response_json = req.json()
user_id = str(response_json["id"])
user_reference = response_json["subscriptions"][self._atome_linky_number][
"reference"
]
self._user_id = user_id
self._user_reference = user_reference
except (
KeyError,
OSError,
json.decoder.JSONDecodeError,
simplejson.errors.JSONDecodeError,
) as e:
_LOGGER.debug(
"Impossible to decode response: \nResponse was: [%s] %s",
str(e),
str(req.status_code),
str(req.text),
)
error_flag = True
if error_flag:
return None
return response_json
def get_user_reference(self):
"""Get user reference respect to linky number."""
return self._user_reference
def _get_info_from_server(self, url, max_retries=0):
error_flag = False
if max_retries > MAX_RETRIES:
_LOGGER.debug("Can't gather proper data. Max retries exceeded.")
error_flag = True
return None
try:
req = self._session.get(url, timeout=self._timeout)
except OSError as e:
_LOGGER.debug("Could not access Atome's API: " + str(e))
error_flag = True
if error_flag:
return None
if req.status_code == 403:
# session is wrong, need to relogin
self.login()
logging.info("Got error %s, relogging (max retries: %s)", str(req.status_code), str(max_retries))
return self._get_info_from_server(url, max_retries + 1)
if req.text == "":
_LOGGER.debug("No data")
error_flag = True
return None
try:
json_output = req.json()
except (
OSError,
json.decoder.JSONDecodeError,
simplejson.errors.JSONDecodeError,
) as e:
_LOGGER.debug(
"Impossible to decode response: "
+ str(e)
+ "\nResponse was: "
+ str(req.text)
)
error_flag = True
if error_flag:
return None
return json_output
def get_live(self):
"""Get current data."""
live_url = (
API_BASE_URI
+ "/api/subscription/"
+ self._user_id
+ "/"
+ self._user_reference
+ API_ENDPOINT_LIVE
)
return self._get_info_from_server(live_url)
def get_consumption(self, period):
"""Get current data."""
if period not in [
DAILY_PERIOD_TYPE,
WEEKLY_PERIOD_TYPE,
MONTHLY_PERIOD_TYPE,
YEARLY_PERIOD_TYPE,
]:
            raise ValueError(
                "Period %s out of range. Shall be either 'day', 'week', 'month' or 'year'."
                % str(period)
            )
consumption_url = (
API_BASE_URI
+ "/api/subscription/"
+ self._user_id
+ "/"
+ self._user_reference
+ API_ENDPOINT_CONSUMPTION
+ "?period=so"
+ period[:1]
)
return self._get_info_from_server(consumption_url)
def close_session(self):
"""Close current session."""
self._session.close()
self._session = None
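A minimal usage sketch for the client above; the credentials are placeholder values:

# Placeholder credentials; login() returns None on failure.
client = AtomeClient("user@example.com", "secret", atome_linky_number=1)
if client.login() is not None:
    live = client.get_live()
    daily = client.get_consumption(DAILY_PERIOD_TYPE)
    print(live, daily)
client.close_session()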
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class WhenDirectivesFalse(Package):
"""Package that tests False when specs on directives."""
homepage = "http://www.example.com"
url = "http://www.example.com/example-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
patch('https://example.com/foo.patch',
sha256='abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234',
when=False)
extends('extendee', when=False)
depends_on('b', when=False)
conflicts('@1.0', when=False)
resource(url="http://www.example.com/example-1.0-resource.tar.gz",
md5='0123456789abcdef0123456789abcdef',
when=False)
import numpy as np
import math
class Cache():
def __init__(self, max_size=10):
self.cache = []
self.size = 0
        self.max_size = max_size
def add(self, element):
self.cache.append(element)
        self.size += 1
if self.size > self.max_size:
del self.cache[0]
self.size = self.max_size
def mean(self):
return np.mean(np.array(self.cache), axis=0)
def empty(self):
return self.size == 0
def get_size(self):
return self.size
def get_last(self):
return self.cache[self.size-1]
def print_cache(self):
for e in self.cache:
print(e)
if __name__ == '__main__':
print('===Test Cache===')
cache = Cache(max_size=5)
cache.add([5,4])
print(cache.get_size())
print(cache.print_cache())
cache.add([8,1])
cache.add([3,2])
cache.add([4,5])
cache.add([6,2])
print(cache.get_size())
print(cache.print_cache())
cache.add([1,4])
print(cache.get_size())
print(cache.print_cache())
print(cache.mean())
#####################################################################################
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# This source code is subject to terms and conditions of the Apache License, Version 2.0. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the Apache License, Version 2.0, please send an email to
# ironpy@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Apache License, Version 2.0.
#
# You must not remove this notice, or any other, from this software.
#
#
#####################################################################################
from generate import generate
import System
import clr
import exceptions
def collect_excs():
ret = []
for e in exceptions.__dict__.values():
if not hasattr(e, '__bases__'): continue
if e.__name__ == "exceptions": continue
if e.__name__ == "__builtin__": continue
assert len(e.__bases__) <= 1, e
if len(e.__bases__) == 0:
continue
#supername = None
else:
supername = e.__bases__[0].__name__
ret.append( (e, supername) )
return ret
excs = collect_excs()
pythonExcs = ['ImportError', 'RuntimeError', 'UnicodeTranslateError', 'PendingDeprecationWarning', 'EnvironmentError',
'LookupError', 'OSError', 'DeprecationWarning', 'UnicodeError', 'FloatingPointError', 'ReferenceError',
'FutureWarning', 'AssertionError', 'RuntimeWarning', 'ImportWarning', 'UserWarning', 'SyntaxWarning',
'UnicodeWarning', 'StopIteration', 'BytesWarning', 'BufferError']
class ExceptionInfo(object):
def __init__(self, name, clrException, args, fields, subclasses, silverlightSupported = True, baseMapping = None):
self.name = name
self.clrException = clrException
self.args = args
self.fields = fields
self.subclasses = subclasses
self.silverlightSupported = silverlightSupported
self.parent = None
self.baseMapping = baseMapping
for child in subclasses:
child.parent = self
@property
def ConcreteParent(self):
while not self.parent.fields:
self = self.parent
            if self.parent is None: return exceptionHierarchy
return self.parent
@property
def PythonType(self):
if not self.parent:
return 'DynamicHelpers.GetPythonTypeFromType(typeof(%s))' % self.name
else:
return self.name
@property
def ClrType(self):
if not self.parent:
return 'BaseException'
elif self.fields:
return '_' + self.name
else:
return self.name
@property
def ExceptionMappingName(self):
if self.baseMapping:
return self.baseMapping[self.baseMapping.rfind('.')+1:]
return self.DotNetExceptionName
@property
def DotNetExceptionName(self):
return self.clrException[self.clrException.rfind('.')+1:]
@property
def InternalPythonType(self):
if not self.parent:
return 'PythonExceptions._' + self.name
else:
return 'PythonExceptions.' + self.name
def BeginSilverlight(self, cw):
if not self.silverlightSupported:
cw.writeline('')
cw.writeline('#if !SILVERLIGHT');
def EndSilverlight(self, cw):
if not self.silverlightSupported:
cw.writeline('#endif // !SILVERLIGHT')
cw.writeline('');
def MakeNewException(self):
if self.fields or self.name == 'BaseException':
return 'new PythonExceptions._%s()' % (self.name)
else:
return 'new PythonExceptions.%s(PythonExceptions.%s)' % (self.ConcreteParent.ClrType, self.name)
# format is name, args, (fields, ...), (subclasses, ...)
exceptionHierarchy = ExceptionInfo('BaseException', 'IronPython.Runtime.Exceptions.PythonException', None, None, (
ExceptionInfo('GeneratorExit', 'IronPython.Runtime.Exceptions.GeneratorExitException', None, (), ()),
ExceptionInfo('SystemExit', 'IronPython.Runtime.Exceptions.SystemExitException', None, ('code',), ()),
ExceptionInfo('KeyboardInterrupt', 'Microsoft.Scripting.KeyboardInterruptException', None, (), ()),
ExceptionInfo('Exception', 'IronPython.Runtime.Exceptions.PythonException', None, (), (
ExceptionInfo('StopIteration', 'IronPython.Runtime.Exceptions.StopIterationException', None, (), ()),
ExceptionInfo('StandardError', 'System.ApplicationException', None, (), (
ExceptionInfo('BufferError', 'IronPython.Runtime.Exceptions.BufferException', None, (), ()),
ExceptionInfo('ArithmeticError', 'System.ArithmeticException', None, (), (
ExceptionInfo('FloatingPointError', 'IronPython.Runtime.Exceptions.FloatingPointException', None, (), ()),
ExceptionInfo('OverflowError', 'System.OverflowException', None, (), ()),
ExceptionInfo('ZeroDivisionError', 'System.DivideByZeroException', None, (), ()),
),
),
ExceptionInfo('AssertionError', 'IronPython.Runtime.Exceptions.AssertionException', None, (), ()),
ExceptionInfo('AttributeError', 'IronPython.Runtime.Exceptions.AttributeErrorException', None, (), (), baseMapping = 'System.MissingMemberException'),
ExceptionInfo('EnvironmentError', 'System.Runtime.InteropServices.ExternalException', None, ('errno', 'strerror', 'filename'), (
ExceptionInfo('IOError', 'System.IO.IOException', None, (), ()),
ExceptionInfo('OSError', 'IronPython.Runtime.Exceptions.OSException', None, (), (
ExceptionInfo('WindowsError', 'System.ComponentModel.Win32Exception', None, ('winerror',), ()),
),
),
),
),
ExceptionInfo('EOFError', 'System.IO.EndOfStreamException', None, (), ()),
ExceptionInfo('ImportError', 'IronPython.Runtime.Exceptions.ImportException', None, (), ()),
ExceptionInfo('LookupError', 'IronPython.Runtime.Exceptions.LookupException', None, (), (
ExceptionInfo('IndexError', 'System.IndexOutOfRangeException', None, (), ()),
ExceptionInfo('KeyError', 'System.Collections.Generic.KeyNotFoundException', None, (), ()),
),
),
ExceptionInfo('MemoryError', 'System.OutOfMemoryException', None, (), ()),
ExceptionInfo('NameError', 'IronPython.Runtime.UnboundNameException', None, (), (
ExceptionInfo('UnboundLocalError', 'IronPython.Runtime.UnboundLocalException', None, (), ()),
),
),
ExceptionInfo('ReferenceError', 'IronPython.Runtime.Exceptions.ReferenceException', None, (), ()),
ExceptionInfo('RuntimeError', 'IronPython.Runtime.Exceptions.RuntimeException', None, (), (
ExceptionInfo('NotImplementedError', 'System.NotImplementedException', None, (), ()),
),
),
ExceptionInfo('SyntaxError', 'Microsoft.Scripting.SyntaxErrorException', None, ('text', 'print_file_and_line', 'filename', 'lineno', 'offset', 'msg'), (
ExceptionInfo('IndentationError', 'IronPython.Runtime.Exceptions.IndentationException', None, (), (
ExceptionInfo('TabError', 'IronPython.Runtime.Exceptions.TabException', None, (), ()),
),
),
),
),
ExceptionInfo('SystemError', 'System.SystemException', None, (), ()),
ExceptionInfo('TypeError', 'IronPython.Runtime.Exceptions.TypeErrorException', None, (), (), baseMapping = 'Microsoft.Scripting.ArgumentTypeException'),
ExceptionInfo('ValueError', 'IronPython.Runtime.Exceptions.ValueErrorException', None, (), (
ExceptionInfo('UnicodeError', 'IronPython.Runtime.Exceptions.UnicodeException', None, (),
(
ExceptionInfo('UnicodeDecodeError', 'System.Text.DecoderFallbackException', ('encoding', 'object', 'start', 'end', 'reason'), ('start', 'reason', 'object', 'end', 'encoding'), ()),
ExceptionInfo('UnicodeEncodeError', 'System.Text.EncoderFallbackException', ('encoding', 'object', 'start', 'end', 'reason'), ('start', 'reason', 'object', 'end', 'encoding'), ()),
ExceptionInfo('UnicodeTranslateError', 'IronPython.Runtime.Exceptions.UnicodeTranslateException', None, ('start', 'reason', 'object', 'end', 'encoding'), ()),
),
),
),
baseMapping = 'System.ArgumentException'
),
),
),
ExceptionInfo('Warning', 'System.ComponentModel.WarningException', None, (), (
ExceptionInfo('DeprecationWarning', 'IronPython.Runtime.Exceptions.DeprecationWarningException', None, (), ()),
ExceptionInfo('PendingDeprecationWarning', 'IronPython.Runtime.Exceptions.PendingDeprecationWarningException', None, (), ()),
ExceptionInfo('RuntimeWarning', 'IronPython.Runtime.Exceptions.RuntimeWarningException', None, (), ()),
ExceptionInfo('SyntaxWarning', 'IronPython.Runtime.Exceptions.SyntaxWarningException', None, (), ()),
ExceptionInfo('UserWarning', 'IronPython.Runtime.Exceptions.UserWarningException', None, (), ()),
ExceptionInfo('FutureWarning', 'IronPython.Runtime.Exceptions.FutureWarningException', None, (), ()),
ExceptionInfo('ImportWarning', 'IronPython.Runtime.Exceptions.ImportWarningException', None, (), ()),
ExceptionInfo('UnicodeWarning', 'IronPython.Runtime.Exceptions.UnicodeWarningException', None, (), ()),
ExceptionInfo('BytesWarning', 'IronPython.Runtime.Exceptions.BytesWarningException', None, (), ()),
),
),
),
),
),
)
def get_exception_info(pythonName, curHierarchy):
for exception in curHierarchy.subclasses:
if exception.name == pythonName:
return exception
for exception in curHierarchy.subclasses:
res = get_exception_info(pythonName, exception)
if res is not None:
return res
def get_all_exceps(l, curHierarchy):
# if we have duplicate CLR exceptions (e.g. VMSError and Exception)
# only generate the one highest in the Python hierarchy
for exception in curHierarchy.subclasses:
found = False
for e in l:
if e.clrException == exception.clrException:
found = True
break
if not found:
l.append(exception)
for exception in curHierarchy.subclasses:
get_all_exceps(l, exception)
return l
ip = clr.LoadAssemblyByPartialName('ironpython')
ms = clr.LoadAssemblyByPartialName('Microsoft.Scripting')
md = clr.LoadAssemblyByPartialName('Microsoft.Dynamic')
sysdll = clr.LoadAssemblyByPartialName('System')
def get_type(name):
if name.startswith('IronPython'): return ip.GetType(name)
if name.startswith('Microsoft.Scripting'):
res = ms.GetType(name)
return res if res is not None else md.GetType(name)
if name.startswith('System.ComponentModel'): return sysdll.GetType(name)
return System.Type.GetType(name)
def exception_distance(a):
distance = 0
while a.FullName != "System.Exception":
a = a.BaseType
distance += 1
return distance
def get_compare_name(ex_info):
return ex_info.baseMapping or ex_info.clrException
def compare_exceptions(a, b):
a, b = get_compare_name(a), get_compare_name(b)
ta = get_type(a)
tb = get_type(b)
if ta == None:
raise Exception("Exception class not found %s " % a)
if tb == None:
raise Exception("Exception class not found %s " % b)
if ta.IsSubclassOf(tb): return -1
if tb.IsSubclassOf(ta): return 1
da = exception_distance(ta)
db = exception_distance(tb)
# put exceptions further from System.Exception first, those closer to it later...
if da != db: return db - da
return cmp(ta.Name, tb.Name)
def gen_topython_helper(cw):
cw.enter_block("private static BaseException/*!*/ ToPythonHelper(System.Exception clrException)")
allExceps = get_all_exceps([], exceptionHierarchy)
allExceps.sort(cmp=compare_exceptions)
for x in allExceps[:-1]: # skip System.Exception which is last...
if not x.silverlightSupported: cw.writeline('#if !SILVERLIGHT')
cw.writeline('if (clrException is %s) return %s;' % (x.ExceptionMappingName, x.MakeNewException()))
if not x.silverlightSupported: cw.writeline('#endif')
cw.writeline('return new BaseException(Exception);')
cw.exit_block()
def get_clr_name(e):
return e.replace('Error', '') + 'Exception'
FACTORY = """
public static Exception %(name)s(string format, params object[] args) {
return new %(clrname)s(string.Format(format, args));
}"""
def factory_gen(cw):
for e in pythonExcs:
cw.write(FACTORY, name=e, clrname=get_clr_name(e))
CLASS1 = """
[Serializable]
public class %(name)s : %(supername)s, IPythonAwareException {
private object _pyExceptionObject;
private List<DynamicStackFrame> _frames;
private TraceBack _traceback;
public %(name)s() : base() { }
public %(name)s(string msg) : base(msg) { }
public %(name)s(string message, Exception innerException)
: base(message, innerException) {
}
#if FEATURE_SERIALIZATION
protected %(name)s(SerializationInfo info, StreamingContext context) : base(info, context) { }
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2123:OverrideLinkDemandsShouldBeIdenticalToBase")]
public override void GetObjectData(SerializationInfo info, StreamingContext context) {
info.AddValue("frames", _frames);
info.AddValue("traceback", _traceback);
base.GetObjectData(info, context);
}
#endif
object IPythonAwareException.PythonException {
get {
if (_pyExceptionObject == null) {
var newEx = %(make_new_exception)s;
newEx.InitializeFromClr(this);
_pyExceptionObject = newEx;
}
return _pyExceptionObject;
}
set { _pyExceptionObject = value; }
}
List<DynamicStackFrame> IPythonAwareException.Frames {
get { return _frames; }
set { _frames = value; }
}
TraceBack IPythonAwareException.TraceBack {
get { return _traceback; }
set { _traceback = value; }
}
}
"""
def gen_one_exception(cw, e):
supername = getattr(exceptions, e).__bases__[0].__name__
if not supername in pythonExcs and supername != 'Warning':
supername = ''
cw.write(CLASS1, name=get_clr_name(e), supername=get_clr_name(supername), make_new_exception = get_exception_info(e, exceptionHierarchy).MakeNewException())
def gen_one_exception_maker(e):
def gen_one_exception_specialized(x):
return gen_one_exception(x, e)
return gen_one_exception_specialized
def fix_object(name):
if name == "object": return "@object"
return name
def gen_one_new_exception(cw, exception, parent):
if exception.fields:
exception.BeginSilverlight(cw)
cw.writeline('[MultiRuntimeAware]')
cw.writeline('private static PythonType %sStorage;' % (exception.name, ))
cw.enter_block('public static PythonType %s' % (exception.name, ))
cw.enter_block('get')
cw.enter_block('if (%sStorage == null)' % (exception.name, ))
cw.enter_block('lock (_pythonExceptionsLock)')
cw.writeline('%sStorage = CreateSubType(%s, typeof(_%s), msg => new %s(msg));' % (exception.name, exception.parent.PythonType, exception.name, exception.DotNetExceptionName))
cw.exit_block() # lock
cw.exit_block() # if
cw.writeline('return %sStorage;' % (exception.name, ))
cw.exit_block()
cw.exit_block()
cw.writeline()
cw.writeline('[PythonType("%s"), PythonHidden, DynamicBaseTypeAttribute, Serializable]' % exception.name)
if exception.ConcreteParent.fields:
cw.enter_block('public partial class _%s : _%s' % (exception.name, exception.ConcreteParent.name))
else:
cw.enter_block('public partial class _%s : %s' % (exception.name, exception.ConcreteParent.name))
for field in exception.fields:
cw.writeline('private object _%s;' % field)
if exception.fields:
cw.writeline('')
cw.writeline('public _%s() : base(%s) { }' % (exception.name, exception.name))
cw.writeline('public _%s(PythonType type) : base(type) { }' % (exception.name, ))
cw.writeline('')
cw.enter_block('public new static object __new__(PythonType cls, [ParamDictionary]IDictionary<object, object> kwArgs, params object[] args)')
cw.writeline('return Activator.CreateInstance(cls.UnderlyingSystemType, cls);')
cw.exit_block()
cw.writeline('')
if exception.args:
argstr = ', '.join(['object ' + fix_object(x) for x in exception.args])
cw.enter_block('public void __init__(%s)' % (argstr))
for arg in exception.args:
cw.writeline('_%s = %s;' % (arg, fix_object(arg)))
cw.writeline('args = PythonTuple.MakeTuple(' + ', '.join([fix_object(x) for x in exception.args]) + ');')
cw.exit_block()
cw.writeline('')
cw.enter_block('public override void __init__(params object[] args)')
cw.enter_block('if (args == null || args.Length != %d)' % (len(exception.args), ))
cw.writeline('throw PythonOps.TypeError("__init__ takes exactly %d arguments ({0} given)", args.Length);' % len(exception.args))
cw.exit_block()
cw.writeline('__init__(' + ', '.join([fix_object(x) for x in exception.args]) + ');')
cw.exit_block()
cw.writeline('')
for field in exception.fields:
cw.enter_block('public object %s' % fix_object(field))
cw.writeline('get { return _%s; }' % field)
cw.writeline('set { _%s = value; }' % field)
cw.exit_block()
cw.writeline('')
cw.exit_block()
cw.writeline('')
exception.EndSilverlight(cw)
else:
cw.writeline('[MultiRuntimeAware]')
cw.writeline('private static PythonType %sStorage;' % (exception.name, ))
cw.enter_block('public static PythonType %s' % (exception.name, ))
cw.enter_block('get')
cw.enter_block('if (%sStorage == null)' % (exception.name, ))
cw.enter_block('lock (_pythonExceptionsLock)')
cw.writeline('%sStorage = CreateSubType(%s, "%s", msg => new %s(msg));' % (exception.name, exception.parent.PythonType, exception.name, exception.DotNetExceptionName))
cw.exit_block() # lock
cw.exit_block() # if
cw.writeline('return %sStorage;' % (exception.name, ))
cw.exit_block()
cw.exit_block()
cw.writeline()
for child in exception.subclasses:
gen_one_new_exception(cw, child, exception)
def newstyle_gen(cw):
for child in exceptionHierarchy.subclasses:
gen_one_new_exception(cw, child, exceptionHierarchy)
def gen_one_exception_module_entry(cw, exception, parent):
exception.BeginSilverlight(cw)
cw.write("public static PythonType %s = %s;" % (exception.name, exception.InternalPythonType))
exception.EndSilverlight(cw)
for child in exception.subclasses:
gen_one_exception_module_entry(cw, child, exception)
def module_gen(cw):
cw.write("public static object BaseException = DynamicHelpers.GetPythonTypeFromType(typeof(PythonExceptions.BaseException));")
for child in exceptionHierarchy.subclasses:
gen_one_exception_module_entry(cw, child, exceptionHierarchy)
def gen_one_exception_builtin_entry(cw, exception, parent):
exception.BeginSilverlight(cw)
cw.enter_block("public static PythonType %s" % (exception.name, ))
if exception.fields:
cw.write('get { return %s; }' % (exception.InternalPythonType, ))
else:
cw.write('get { return %s; }' % (exception.InternalPythonType, ))
cw.exit_block()
exception.EndSilverlight(cw)
for child in exception.subclasses:
gen_one_exception_builtin_entry(cw, child, exception)
def builtin_gen(cw):
for child in exceptionHierarchy.subclasses:
gen_one_exception_builtin_entry(cw, child, exceptionHierarchy)
def main():
gens = [
("ToPython Exception Helper", gen_topython_helper),
("Exception Factories", factory_gen),
("Python New-Style Exceptions", newstyle_gen),
("builtin exceptions", builtin_gen),
]
for e in pythonExcs:
gens.append((get_clr_name(e), gen_one_exception_maker(e)))
return generate(*gens)
if __name__ == "__main__":
main()
| [
[
[
751,
759
],
[
22830,
22838
]
],
[
[
767,
773
],
[
12946,
12952
]
],
[
[
781,
784
],
[
12406,
12409
],
[
12455,
12458
],
[
12513,
12516
],
[
12573,
12576
]
],
[
[
793,
803
],
[
851,
861
],
[
16417,
16427
]
],
[
[
809,
821
],
[
1284,
1296
]
],
[
[
1277,
1281
]
],
[
[
1300,
1310
],
[
14742,
14752
],
[
16478,
16488
],
[
22739,
22749
]
],
[
[
1742,
1755
],
[
4049,
4062
],
[
4155,
4168
],
[
4269,
4282
],
[
4384,
4397
],
[
4496,
4509
],
[
4604,
4617
],
[
4726,
4739
],
[
4828,
4841
],
[
4949,
4962
],
[
5060,
5073
],
[
5203,
5216
],
[
5313,
5326
],
[
5553,
5566
],
[
5680,
5693
],
[
5859,
5872
],
[
6024,
6037
],
[
6125,
6138
],
[
6251,
6264
],
[
6523,
6536
],
[
6626,
6639
],
[
6747,
6760
],
[
6873,
6886
],
[
6987,
7000
],
[
7173,
7186
],
[
7276,
7289
],
[
7394,
7407
],
[
7582,
7595
],
[
7709,
7722
],
[
7837,
7850
],
[
8017,
8030
],
[
8206,
8219
],
[
8350,
8363
],
[
8681,
8694
],
[
8779,
8792
],
[
8960,
8973
],
[
9089,
9102
],
[
9266,
9279
],
[
9491,
9504
],
[
9716,
9729
],
[
10166,
10179
],
[
10273,
10286
],
[
10413,
10426
],
[
10567,
10580
],
[
10699,
10712
],
[
10829,
10842
],
[
10955,
10968
],
[
11085,
11098
],
[
11215,
11228
],
[
11347,
11360
]
],
[
[
4028,
4046
],
[
2398,
2416
],
[
14006,
14024
],
[
16661,
16679
],
[
21126,
21144
],
[
21198,
21216
],
[
21738,
21756
],
[
21819,
21837
],
[
22385,
22403
],
[
22467,
22485
]
],
[
[
11560,
11578
],
[
11782,
11800
],
[
16639,
16657
]
],
[
[
11880,
11894
],
[
12358,
12372
],
[
13987,
14001
]
],
[
[
12401,
12403
],
[
12690,
12692
]
],
[
[
12450,
12452
],
[
12771,
12773
]
],
[
[
12508,
12510
],
[
12831,
12833
]
],
[
[
12564,
12570
],
[
12909,
12915
]
],
[
[
12618,
12626
],
[
13316,
13324
],
[
13337,
13345
]
],
[
[
12978,
12996
],
[
13621,
13639
],
[
13653,
13671
]
],
[
[
13137,
13153
],
[
13261,
13277
],
[
13282,
13298
]
],
[
[
13224,
13242
],
[
14049,
14067
]
],
[
[
13839,
13858
],
[
22551,
22570
]
],
[
[
14493,
14505
],
[
14796,
14808
],
[
16566,
16578
],
[
16593,
16605
],
[
22772,
22784
]
],
[
[
14559,
14566
],
[
14771,
14778
]
],
[
[
14712,
14723
],
[
22605,
22616
]
],
[
[
14814,
14820
],
[
16553,
16559
]
],
[
[
16363,
16380
],
[
16791,
16808
]
],
[
[
16706,
16729
],
[
22789,
22812
]
],
[
[
16862,
16872
],
[
18947,
18957
],
[
19154,
19164
],
[
19242,
19252
],
[
19743,
19753
],
[
19944,
19954
]
],
[
[
16943,
16964
],
[
21030,
21051
],
[
21165,
21186
]
],
[
[
21091,
21103
],
[
22659,
22671
]
],
[
[
21231,
21261
],
[
21503,
21533
],
[
21777,
21807
]
],
[
[
21569,
21579
]
],
[
[
21844,
21875
],
[
22292,
22323
],
[
22424,
22455
]
],
[
[
22351,
22362
],
[
22705,
22716
]
],
[
[
22492,
22496
],
[
22878,
22882
]
]
] |
import itertools
from operator import getitem
import pytest
from toolz import merge
np = pytest.importorskip('numpy')
import dask
import dask.array as da
from dask.array.slicing import (_sanitize_index_element, _slice_1d,
new_blockdim, sanitize_index, slice_array,
take, normalize_index, slicing_plan)
from dask.array.utils import assert_eq, same_keys
def test_slice_1d():
expected = {0: slice(10, 25, 1), 1: slice(None, None, None), 2: slice(0, 1, 1)}
result = _slice_1d(100, [25] * 4, slice(10, 51, None))
assert expected == result
# x[100:12:-3]
expected = {0: slice(-2, -8, -3),
1: slice(-1, -21, -3),
2: slice(-3, -21, -3),
3: slice(-2, -21, -3),
4: slice(-1, -21, -3)}
result = _slice_1d(100, [20] * 5, slice(100, 12, -3))
assert expected == result
# x[102::-3]
expected = {0: slice(-2, -21, -3),
1: slice(-1, -21, -3),
2: slice(-3, -21, -3),
3: slice(-2, -21, -3),
4: slice(-1, -21, -3)}
result = _slice_1d(100, [20] * 5, slice(102, None, -3))
assert expected == result
# x[::-4]
expected = {0: slice(-1, -21, -4),
1: slice(-1, -21, -4),
2: slice(-1, -21, -4),
3: slice(-1, -21, -4),
4: slice(-1, -21, -4)}
result = _slice_1d(100, [20] * 5, slice(None, None, -4))
assert expected == result
# x[::-7]
expected = {0: slice(-5, -21, -7),
1: slice(-4, -21, -7),
2: slice(-3, -21, -7),
3: slice(-2, -21, -7),
4: slice(-1, -21, -7)}
result = _slice_1d(100, [20] * 5, slice(None, None, -7))
assert expected == result
# x=range(115)
# x[::-7]
expected = {0: slice(-7, -24, -7),
1: slice(-2, -24, -7),
2: slice(-4, -24, -7),
3: slice(-6, -24, -7),
4: slice(-1, -24, -7)}
result = _slice_1d(115, [23] * 5, slice(None, None, -7))
assert expected == result
# x[79::-3]
expected = {0: slice(-1, -21, -3),
1: slice(-3, -21, -3),
2: slice(-2, -21, -3),
3: slice(-1, -21, -3)}
result = _slice_1d(100, [20] * 5, slice(79, None, -3))
assert expected == result
# x[-1:-8:-1]
expected = {4: slice(-1, -8, -1)}
result = _slice_1d(100, [20, 20, 20, 20, 20], slice(-1, 92, -1))
assert expected == result
# x[20:0:-1]
expected = {0: slice(-1, -20, -1),
1: slice(-20, -21, -1)}
result = _slice_1d(100, [20, 20, 20, 20, 20], slice(20, 0, -1))
assert expected == result
# x[:0]
expected = {}
result = _slice_1d(100, [20, 20, 20, 20, 20], slice(0))
assert result
# x=range(99)
expected = {0: slice(-3, -21, -3),
1: slice(-2, -21, -3),
2: slice(-1, -21, -3),
3: slice(-2, -20, -3),
4: slice(-1, -21, -3)}
# This array has non-uniformly sized blocks
result = _slice_1d(99, [20, 20, 20, 19, 20], slice(100, None, -3))
assert expected == result
# x=range(104)
# x[::-3]
expected = {0: slice(-1, -21, -3),
1: slice(-3, -24, -3),
2: slice(-3, -28, -3),
3: slice(-1, -14, -3),
4: slice(-1, -22, -3)}
# This array has non-uniformly sized blocks
result = _slice_1d(104, [20, 23, 27, 13, 21], slice(None, None, -3))
assert expected == result
# x=range(104)
# x[:27:-3]
expected = {1: slice(-3, -16, -3),
2: slice(-3, -28, -3),
3: slice(-1, -14, -3),
4: slice(-1, -22, -3)}
# This array has non-uniformly sized blocks
result = _slice_1d(104, [20, 23, 27, 13, 21], slice(None, 27, -3))
assert expected == result
# x=range(104)
# x[100:27:-3]
expected = {1: slice(-3, -16, -3),
2: slice(-3, -28, -3),
3: slice(-1, -14, -3),
4: slice(-4, -22, -3)}
# This array has non-uniformly sized blocks
result = _slice_1d(104, [20, 23, 27, 13, 21], slice(100, 27, -3))
assert expected == result
# x=range(1000000000000)
# x[1000:]
expected = {0: slice(1000, 1000000000, 1)}
expected.update({ii: slice(None, None, None) for ii in range(1, 1000)})
# This array is large
result = _slice_1d(1000000000000,
[1000000000] * 1000,
slice(1000, None, None))
assert expected == result
def test_slice_singleton_value_on_boundary():
assert _slice_1d(15, [5, 5, 5], 10) == {2: 0}
assert _slice_1d(30, (5, 5, 5, 5, 5, 5), 10) == {2: 0}
def test_slice_array_1d():
#x[24::2]
expected = {('y', 0): (getitem, ('x', 0), (slice(24, 25, 2),)),
('y', 1): (getitem, ('x', 1), (slice(1, 25, 2),)),
('y', 2): (getitem, ('x', 2), (slice(0, 25, 2),)),
('y', 3): (getitem, ('x', 3), (slice(1, 25, 2),))}
result, chunks = slice_array('y', 'x', [[25] * 4], [slice(24, None, 2)])
assert expected == result
#x[26::2]
expected = {('y', 0): (getitem, ('x', 1), (slice(1, 25, 2),)),
('y', 1): (getitem, ('x', 2), (slice(0, 25, 2),)),
('y', 2): (getitem, ('x', 3), (slice(1, 25, 2),))}
result, chunks = slice_array('y', 'x', [[25] * 4], [slice(26, None, 2)])
assert expected == result
#x[24::2]
expected = {('y', 0): (getitem, ('x', 0), (slice(24, 25, 2),)),
('y', 1): (getitem, ('x', 1), (slice(1, 25, 2),)),
('y', 2): (getitem, ('x', 2), (slice(0, 25, 2),)),
('y', 3): (getitem, ('x', 3), (slice(1, 25, 2),))}
result, chunks = slice_array('y', 'x', [(25, ) * 4], (slice(24, None, 2), ))
assert expected == result
#x[26::2]
expected = {('y', 0): (getitem, ('x', 1), (slice(1, 25, 2),)),
('y', 1): (getitem, ('x', 2), (slice(0, 25, 2),)),
('y', 2): (getitem, ('x', 3), (slice(1, 25, 2),))}
result, chunks = slice_array('y', 'x', [(25, ) * 4], (slice(26, None, 2), ))
assert expected == result
def test_slice_array_2d():
#2d slices: x[13::2,10::1]
expected = {('y', 0, 0): (getitem, ('x', 0, 0),
(slice(13, 20, 2), slice(10, 20, 1))),
('y', 0, 1): (getitem, ('x', 0, 1),
(slice(13, 20, 2), slice(None, None, None))),
('y', 0, 2): (getitem, ('x', 0, 2),
(slice(13, 20, 2), slice(None, None, None)))}
result, chunks = slice_array('y', 'x', [[20], [20, 20, 5]],
[slice(13, None, 2), slice(10, None, 1)])
assert expected == result
#2d slices with one dimension: x[5,10::1]
expected = {('y', 0): (getitem, ('x', 0, 0),
(5, slice(10, 20, 1))),
('y', 1): (getitem, ('x', 0, 1),
(5, slice(None, None, None))),
('y', 2): (getitem, ('x', 0, 2),
(5, slice(None, None, None)))}
result, chunks = slice_array('y', 'x', ([20], [20, 20, 5]),
[5, slice(10, None, 1)])
assert expected == result
def test_slice_optimizations():
#bar[:]
expected = {('foo', 0): ('bar', 0)}
result, chunks = slice_array('foo', 'bar', [[100]], (slice(None, None, None),))
assert expected == result
#bar[:,:,:]
expected = {('foo', 0): ('bar', 0),
('foo', 1): ('bar', 1),
('foo', 2): ('bar', 2)}
result, chunks = slice_array('foo', 'bar', [(100, 1000, 10000)],
(slice(None, None, None),
slice(None, None, None),
slice(None, None, None)))
assert expected == result
def test_slicing_with_singleton_indices():
result, chunks = slice_array('y', 'x', ([5, 5], [5, 5]), (slice(0, 5), 8))
expected = {('y', 0): (getitem, ('x', 0, 1), (slice(None, None, None), 3))}
assert expected == result
def test_slicing_with_newaxis():
result, chunks = slice_array('y', 'x', ([5, 5], [5, 5]),
(slice(0, 3), None, slice(None, None, None)))
expected = {
('y', 0, 0, 0): (getitem, ('x', 0, 0),
(slice(0, 3, 1), None, slice(None, None, None))),
('y', 0, 0, 1): (getitem, ('x', 0, 1),
(slice(0, 3, 1), None, slice(None, None, None)))}
assert expected == result
assert chunks == ((3,), (1,), (5, 5))
def test_take():
chunks, dsk = take('y', 'x', [(20, 20, 20, 20)], [5, 1, 47, 3], axis=0)
expected = {('y', 0): (getitem, ('x', 0), (np.array([5, 1]),)),
('y', 1): (getitem, ('x', 2), (np.array([7]),)),
('y', 2): (getitem, ('x', 0), (np.array([3]),))}
np.testing.assert_equal(sorted(dsk.items()), sorted(expected.items()))
assert chunks == ((2, 1, 1),)
chunks, dsk = take('y', 'x', [(20, 20, 20, 20), (20, 20)], [
5, 1, 47, 3], axis=0)
expected = {('y', 0, 0): (getitem, ('x', 0, 0), (np.array([5, 1]), slice(None, None, None))),
('y', 0, 1): (getitem, ('x', 0, 1), (np.array([5, 1]), slice(None, None, None))),
('y', 1, 0): (getitem, ('x', 2, 0), (np.array([7]), slice(None, None, None))),
('y', 1, 1): (getitem, ('x', 2, 1), (np.array([7]), slice(None, None, None))),
('y', 2, 0): (getitem, ('x', 0, 0), (np.array([3]), slice(None, None, None))),
('y', 2, 1): (getitem, ('x', 0, 1), (np.array([3]), slice(None, None, None)))}
np.testing.assert_equal(sorted(dsk.items()), sorted(expected.items()))
assert chunks == ((2, 1, 1), (20, 20))
def test_take_sorted():
chunks, dsk = take('y', 'x', [(20, 20, 20, 20)], [1, 3, 5, 47], axis=0)
expected = {('y', 0): (getitem, ('x', 0), ([1, 3, 5],)),
('y', 1): (getitem, ('x', 2), ([7],))}
np.testing.assert_equal(dsk, expected)
assert chunks == ((3, 1),)
chunks, dsk = take('y', 'x', [(20, 20, 20, 20), (20, 20)], [1, 3, 5, 37], axis=1)
expected = merge(dict((('y', i, 0), (getitem, ('x', i, 0),
(slice(None, None, None), [1, 3, 5])))
for i in range(4)),
dict((('y', i, 1), (getitem, ('x', i, 1),
(slice(None, None, None), [17])))
for i in range(4)))
np.testing.assert_equal(dsk, expected)
assert chunks == ((20, 20, 20, 20), (3, 1))
def test_slicing_chunks():
result, chunks = slice_array('y', 'x', ([5, 5], [5, 5]),
(1, np.array([2, 0, 3])))
assert chunks == ((3,), )
result, chunks = slice_array('y', 'x', ([5, 5], [5, 5]),
(slice(0, 7), np.array([2, 0, 3])))
assert chunks == ((5, 2), (3, ))
result, chunks = slice_array('y', 'x', ([5, 5], [5, 5]),
(slice(0, 7), 1))
assert chunks == ((5, 2), )
def test_slicing_with_numpy_arrays():
a, bd1 = slice_array('y', 'x', ((3, 3, 3, 1), (3, 3, 3, 1)),
(np.array([1, 2, 9]), slice(None, None, None)))
b, bd2 = slice_array('y', 'x', ((3, 3, 3, 1), (3, 3, 3, 1)),
(np.array([1, 2, 9]), slice(None, None, None)))
assert bd1 == bd2
np.testing.assert_equal(a, b)
i = [False, True, True, False, False,
False, False, False, False, True]
index = (i, slice(None, None, None))
index = normalize_index(index, (10, 10))
c, bd3 = slice_array('y', 'x', ((3, 3, 3, 1), (3, 3, 3, 1)), index)
assert bd1 == bd3
np.testing.assert_equal(a, c)
def test_slicing_and_chunks():
o = da.ones((24, 16), chunks=((4, 8, 8, 4), (2, 6, 6, 2)))
t = o[4:-4, 2:-2]
assert t.chunks == ((8, 8), (6, 6))
def test_slicing_identities():
a = da.ones((24, 16), chunks=((4, 8, 8, 4), (2, 6, 6, 2)))
assert a is a[slice(None)]
assert a is a[:]
assert a is a[::]
assert a is a[...]
assert a is a[0:]
assert a is a[0::]
assert a is a[::1]
assert a is a[0:len(a)]
assert a is a[0::1]
assert a is a[0:len(a):1]
def test_slice_stop_0():
# from gh-125
a = da.ones(10, chunks=(10,))[:0].compute()
b = np.ones(10)[:0]
assert_eq(a, b)
def test_slice_list_then_None():
x = da.zeros(shape=(5, 5), chunks=(3, 3))
y = x[[2, 1]][None]
assert_eq(y, np.zeros((1, 2, 5)))
class ReturnItem(object):
def __getitem__(self, key):
return key
@pytest.mark.skip(reason='really long test')
def test_slicing_exhaustively():
x = np.random.rand(6, 7, 8)
a = da.from_array(x, chunks=(3, 3, 3))
I = ReturnItem()
# independent indexing along different axes
indexers = [0, -2, I[:], I[:5], [0, 1], [0, 1, 2], [4, 2], I[::-1], None, I[:0], []]
for i in indexers:
assert_eq(x[i], a[i]), i
for j in indexers:
assert_eq(x[i][:, j], a[i][:, j]), (i, j)
assert_eq(x[:, i][j], a[:, i][j]), (i, j)
for k in indexers:
assert_eq(x[..., i][:, j][k], a[..., i][:, j][k]), (i, j, k)
# repeated indexing along the first axis
first_indexers = [I[:], I[:5], np.arange(5), [3, 1, 4, 5, 0], np.arange(6) < 6]
second_indexers = [0, -1, 3, I[:], I[:3], I[2:-1], [2, 4], [], I[:0]]
for i in first_indexers:
for j in second_indexers:
assert_eq(x[i][j], a[i][j]), (i, j)
def test_slicing_with_negative_step_flops_keys():
x = da.arange(10, chunks=5)
y = x[:1:-1]
assert (x.name, 1) in y.dask[(y.name, 0)]
assert (x.name, 0) in y.dask[(y.name, 1)]
assert_eq(y, np.arange(10)[:1:-1])
assert y.chunks == ((5, 3),)
assert y.dask[(y.name, 0)] == (getitem, (x.name, 1),
(slice(-1, -6, -1),))
assert y.dask[(y.name, 1)] == (getitem, (x.name, 0),
(slice(-1, -4, -1),))
def test_empty_slice():
x = da.ones((5, 5), chunks=(2, 2), dtype='i4')
y = x[:0]
assert_eq(y, np.ones((5, 5), dtype='i4')[:0])
def test_multiple_list_slicing():
x = np.random.rand(6, 7, 8)
a = da.from_array(x, chunks=(3, 3, 3))
assert_eq(x[:, [0, 1, 2]][[0, 1]], a[:, [0, 1, 2]][[0, 1]])
@pytest.mark.skipif(np.__version__ < '1.13.0',
reason='boolean lists are not treated as boolean indexes')
def test_boolean_list_slicing():
with pytest.raises(IndexError):
da.asarray(range(2))[[True]]
with pytest.raises(IndexError):
da.asarray(range(2))[[False, False, False]]
x = np.arange(5)
ind = [True, False, False, False, True]
assert_eq(da.asarray(x)[ind], x[ind])
# https://github.com/dask/dask/issues/3706
ind = [True]
assert_eq(da.asarray([0])[ind], np.arange(1)[ind])
def test_boolean_numpy_array_slicing():
with pytest.raises(IndexError):
da.asarray(range(2))[np.array([True])]
with pytest.raises(IndexError):
da.asarray(range(2))[np.array([False, False, False])]
x = np.arange(5)
ind = np.array([True, False, False, False, True])
assert_eq(da.asarray(x)[ind], x[ind])
# https://github.com/dask/dask/issues/3706
ind = np.array([True])
assert_eq(da.asarray([0])[ind], np.arange(1)[ind])
def test_empty_list():
x = np.ones((5, 5, 5), dtype='i4')
dx = da.from_array(x, chunks=2)
assert_eq(dx[[], :3, :2], x[[], :3, :2])
assert_eq(dx[:3, [], :2], x[:3, [], :2])
assert_eq(dx[:3, :2, []], x[:3, :2, []])
def test_uneven_chunks():
assert da.ones(20, chunks=5)[::2].chunks == ((3, 2, 3, 2),)
def test_new_blockdim():
assert new_blockdim(20, [5, 5, 5, 5], slice(0, None, 2)) == [3, 2, 3, 2]
def test_slicing_consistent_names():
x = np.arange(100).reshape((10, 10))
a = da.from_array(x, chunks=(5, 5))
assert same_keys(a[0], a[0])
assert same_keys(a[:, [1, 2, 3]], a[:, [1, 2, 3]])
assert same_keys(a[:, 5:2:-1], a[:, 5:2:-1])
assert same_keys(a[0, ...], a[0, ...])
assert same_keys(a[...], a[...])
assert same_keys(a[[1, 3, 5]], a[[1, 3, 5]])
assert same_keys(a[-11:11], a[:])
assert same_keys(a[-11:-9], a[:1])
assert same_keys(a[-1], a[9])
assert same_keys(a[0::-1], a[0:-11:-1])
def test_slicing_consistent_names_after_normalization():
x = da.zeros(10, chunks=(5,))
assert same_keys(x[0:], x[:10])
assert same_keys(x[0:], x[0:10])
assert same_keys(x[0:], x[0:10:1])
assert same_keys(x[:], x[0:10:1])
def test_sanitize_index_element():
with pytest.raises(TypeError):
_sanitize_index_element('Hello!')
def test_sanitize_index():
pd = pytest.importorskip('pandas')
with pytest.raises(TypeError):
sanitize_index('Hello!')
np.testing.assert_equal(sanitize_index(pd.Series([1, 2, 3])), [1, 2, 3])
np.testing.assert_equal(sanitize_index((1, 2, 3)), [1, 2, 3])
def test_uneven_blockdims():
blockdims = ((31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30), (100,))
index = (slice(240, 270), slice(None))
dsk_out, bd_out = slice_array('in', 'out', blockdims, index)
sol = {('in', 0, 0): (getitem, ('out', 7, 0), (slice(28, 31, 1), slice(None))),
('in', 1, 0): (getitem, ('out', 8, 0), (slice(0, 27, 1), slice(None)))}
assert dsk_out == sol
assert bd_out == ((3, 27), (100,))
blockdims = ((31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30),) * 2
index = (slice(240, 270), slice(180, 230))
dsk_out, bd_out = slice_array('in', 'out', blockdims, index)
sol = {('in', 0, 0): (getitem, ('out', 7, 5), (slice(28, 31, 1), slice(29, 30, 1))),
('in', 0, 1): (getitem, ('out', 7, 6), (slice(28, 31, 1), slice(None))),
('in', 0, 2): (getitem, ('out', 7, 7), (slice(28, 31, 1), slice(0, 18, 1))),
('in', 1, 0): (getitem, ('out', 8, 5), (slice(0, 27, 1), slice(29, 30, 1))),
('in', 1, 1): (getitem, ('out', 8, 6), (slice(0, 27, 1), slice(None))),
('in', 1, 2): (getitem, ('out', 8, 7), (slice(0, 27, 1), slice(0, 18, 1)))}
assert dsk_out == sol
assert bd_out == ((3, 27), (1, 31, 18))
def test_oob_check():
x = da.ones(5, chunks=(2,))
with pytest.raises(IndexError):
x[6]
with pytest.raises(IndexError):
x[[6]]
with pytest.raises(IndexError):
x[-10]
with pytest.raises(IndexError):
x[[-10]]
with pytest.raises(IndexError):
x[0, 0]
@pytest.mark.parametrize('idx_chunks', [None, 3, 2, 1])
@pytest.mark.parametrize('x_chunks', [None, (3, 5), (2, 3), (1, 2), (1, 1)])
def test_index_with_int_dask_array(x_chunks, idx_chunks):
# test data is crafted to stress use cases:
# - pick from different chunks of x out of order
# - a chunk of x contains no matches
# - only one chunk of x
x = np.array([[10, 20, 30, 40, 50],
[60, 70, 80, 90, 100],
[110, 120, 130, 140, 150]])
idx = np.array([3, 0, 1])
expect = np.array([[40, 10, 20],
[90, 60, 70],
[140, 110, 120]])
if x_chunks is not None:
x = da.from_array(x, chunks=x_chunks)
if idx_chunks is not None:
idx = da.from_array(idx, chunks=idx_chunks)
assert_eq(x[:, idx], expect)
assert_eq(x.T[idx, :], expect.T)
@pytest.mark.parametrize('chunks', [1, 2, 3])
def test_index_with_int_dask_array_0d(chunks):
# Slice by 0-dimensional array
x = da.from_array([[10, 20, 30],
[40, 50, 60]], chunks=chunks)
idx0 = da.from_array(1, chunks=1)
assert_eq(x[idx0, :], x[1, :])
assert_eq(x[:, idx0], x[:, 1])
@pytest.mark.parametrize('chunks', [1, 2, 3, 4, 5])
def test_index_with_int_dask_array_nanchunks(chunks):
# Slice by array with nan-sized chunks
a = da.arange(-2, 3, chunks=chunks)
assert_eq(a[a.nonzero()], np.array([-2, -1, 1, 2]))
# Edge case: the nan-sized chunks resolve to size 0
a = da.zeros(5, chunks=chunks)
assert_eq(a[a.nonzero()], np.array([]))
@pytest.mark.parametrize('chunks', [2, 4])
def test_index_with_int_dask_array_negindex(chunks):
a = da.arange(4, chunks=chunks)
idx = da.from_array([-1, -4], chunks=1)
assert_eq(a[idx], np.array([3, 0]))
@pytest.mark.parametrize('chunks', [2, 4])
def test_index_with_int_dask_array_indexerror(chunks):
a = da.arange(4, chunks=chunks)
idx = da.from_array([4], chunks=1)
with pytest.raises(IndexError):
a[idx].compute()
idx = da.from_array([-5], chunks=1)
with pytest.raises(IndexError):
a[idx].compute()
@pytest.mark.parametrize('dtype', ['int8', 'int16', 'int32', 'int64',
'uint8', 'uint16', 'uint32', 'uint64'])
def test_index_with_int_dask_array_dtypes(dtype):
a = da.from_array([10, 20, 30, 40], chunks=-1)
idx = da.from_array(np.array([1, 2]).astype(dtype), chunks=1)
assert_eq(a[idx], np.array([20, 30]))
def test_index_with_int_dask_array_nocompute():
""" Test that when the indices are a dask array
they are not accidentally computed
"""
def crash():
raise NotImplementedError()
x = da.arange(5, chunks=-1)
idx = da.Array({('x', 0): (crash,)}, name='x',
chunks=((2,), ), dtype=np.int64)
result = x[idx]
with pytest.raises(NotImplementedError):
result.compute()
def test_index_with_bool_dask_array():
x = np.arange(36).reshape((6, 6))
d = da.from_array(x, chunks=(3, 3))
ind = np.asarray([True, True, False, True, False, False], dtype=bool)
ind = da.from_array(ind, chunks=2)
for index in [ind, (slice(1, 9, 2), ind), (ind, slice(2, 8, 1))]:
x_index = dask.compute(index)[0]
assert_eq(x[x_index], d[index])
def test_index_with_bool_dask_array_2():
x = np.random.random((10, 10, 10))
ind = np.random.random(10) > 0.5
d = da.from_array(x, chunks=(3, 4, 5))
dind = da.from_array(ind, chunks=4)
index = [slice(1, 9, 1), slice(None)]
for i in range(x.ndim):
index2 = index[:]
index2.insert(i, dind)
index3 = index[:]
index3.insert(i, ind)
assert_eq(x[tuple(index3)], d[tuple(index2)])
@pytest.mark.xfail
def test_cull():
x = da.ones(1000, chunks=(10,))
for slc in [1, slice(0, 30), slice(0, None, 100)]:
y = x[slc]
assert len(y.dask) < len(x.dask)
@pytest.mark.parametrize('shape', [(2,), (2, 3), (2, 3, 5)])
@pytest.mark.parametrize('index', [(Ellipsis,),
(None, Ellipsis),
(Ellipsis, None),
(None, Ellipsis, None)])
def test_slicing_with_Nones(shape, index):
x = np.random.random(shape)
d = da.from_array(x, chunks=shape)
assert_eq(x[index], d[index])
indexers = [Ellipsis, slice(2), 0, 1, -2, -1, slice(-2, None), None]
"""
# We comment this out because it is 4096 tests
@pytest.mark.parametrize('a', indexers)
@pytest.mark.parametrize('b', indexers)
@pytest.mark.parametrize('c', indexers)
@pytest.mark.parametrize('d', indexers)
def test_slicing_none_int_ellipses(a, b, c, d):
if (a, b, c, d).count(Ellipsis) > 1:
return
shape = (2,3,5,7,11)
x = np.arange(np.prod(shape)).reshape(shape)
y = da.core.asarray(x)
xx = x[a, b, c, d]
yy = y[a, b, c, d]
assert_eq(xx, yy)
"""
def test_slicing_integer_no_warnings():
# https://github.com/dask/dask/pull/2457/
X = da.random.random((100, 2), (2, 2))
idx = np.array([0, 0, 1, 1])
with pytest.warns(None) as rec:
X[idx].compute()
assert len(rec) == 0
@pytest.mark.slow
def test_slicing_none_int_ellipes():
shape = (2, 3, 5, 7, 11)
x = np.arange(np.prod(shape)).reshape(shape)
y = da.core.asarray(x)
for ind in itertools.product(indexers, indexers, indexers, indexers):
if ind.count(Ellipsis) > 1:
continue
assert_eq(x[ind], y[ind])
def test_None_overlap_int():
a, b, c, d = (0, slice(None, 2, None), None, Ellipsis)
shape = (2, 3, 5, 7, 11)
x = np.arange(np.prod(shape)).reshape(shape)
y = da.core.asarray(x)
xx = x[a, b, c, d]
yy = y[a, b, c, d]
assert_eq(xx, yy)
def test_negative_n_slicing():
assert_eq(da.ones(2, chunks=2)[-2], np.ones(2)[-2])
def test_negative_list_slicing():
x = np.arange(5)
dx = da.from_array(x, chunks=2)
assert_eq(dx[[0, -5]], x[[0, -5]])
assert_eq(dx[[4, -1]], x[[4, -1]])
def test_permit_oob_slices():
x = np.arange(5)
dx = da.from_array(x, chunks=2)
assert_eq(x[-102:], dx[-102:])
assert_eq(x[102:], dx[102:])
assert_eq(x[:102], dx[:102])
assert_eq(x[:-102], dx[:-102])
def test_normalize_index():
assert normalize_index((Ellipsis, None), (10,)) == (slice(None), None)
assert normalize_index(5, (np.nan,)) == (5,)
assert normalize_index(-5, (np.nan,)) == (-5,)
(result,) = normalize_index([-5, -2, 1], (np.nan,))
assert result.tolist() == [-5, -2, 1]
assert normalize_index(slice(-5, -2), (np.nan,)) == (slice(-5, -2),)
def test_take_semi_sorted():
x = da.ones(10, chunks=(5,))
index = np.arange(15) % 10
y = x[index]
assert y.chunks == ((5, 5, 5),)
@pytest.mark.parametrize('chunks,index,expected', [
(
(5, 5, 5),
np.arange(5, 15) % 10,
[(1, np.arange(5)),
(0, np.arange(5))]
),
(
(5, 5, 5, 5),
np.arange(20) // 2,
[(0, np.arange(10) // 2),
(1, np.arange(10) // 2)]
),
(
(10, 10),
[15, 2, 3, 15],
[(1, [5]),
(0, [2, 3]),
(1, [5])]
),
])
def test_slicing_plan(chunks, index, expected):
plan = slicing_plan(chunks, index)
assert len(plan) == len(expected)
for (i, x), (j, y) in zip(plan, expected):
assert i == j
assert len(x) == len(y)
assert (x == y).all()
def test_pathological_unsorted_slicing():
x = da.ones(100, chunks=10)
# [0, 10, 20, ... 90, 1, 11, 21, ... 91, ...]
index = np.arange(100).reshape(10, 10).ravel(order='F')
with pytest.warns(da.PerformanceWarning) as info:
x[index]
assert '10' in str(info.list[0])
assert 'out-of-order' in str(info.list[0])
@pytest.mark.parametrize('params', [(2, 2, 1), (5, 3, 2)])
def test_setitem_with_different_chunks_preserves_shape(params):
""" Reproducer for https://github.com/dask/dask/issues/3730.
Mutating based on an array with different chunks can cause new chunks to be
used. We need to ensure those new chunk sizes are applied to the mutated
array, otherwise the array won't generate the correct keys.
"""
array_size, chunk_size1, chunk_size2 = params
x = da.zeros(array_size, chunks=chunk_size1)
mask = da.zeros(array_size, chunks=chunk_size2)
x[mask] = 1
result = x.compute()
assert x.shape == result.shape
def test_gh3579():
assert_eq(np.arange(10)[0::-1], da.arange(10, chunks=3)[0::-1])
assert_eq(np.arange(10)[::-1], da.arange(10, chunks=3)[::-1])
@pytest.mark.parametrize('lock', [True, False])
@pytest.mark.parametrize('asarray', [True, False])
@pytest.mark.parametrize('fancy', [True, False])
def test_gh4043(lock, asarray, fancy):
a1 = da.from_array(np.zeros(3,), chunks=1, asarray=asarray, lock=lock, fancy=fancy)
a2 = da.from_array(np.ones(3,), chunks=1, asarray=asarray, lock=lock, fancy=fancy)
al = da.stack([a1, a2])
assert_eq(al, al)
| [
[
[
7,
16
],
[
24103,
24112
]
],
[
[
38,
45
],
[
4872,
4879
],
[
4940,
4947
],
[
5007,
5014
],
[
5074,
5081
],
[
5264,
5271
],
[
5331,
5338
],
[
5398,
5405
],
[
5588,
5595
],
[
5656,
5663
],
[
5723,
5730
],
[
5790,
5797
],
[
5984,
5991
],
[
6051,
6058
],
[
6118,
6125
],
[
6360,
6367
],
[
6481,
6488
],
[
6609,
6616
],
[
6952,
6959
],
[
7052,
7059
],
[
7159,
7166
],
[
8159,
8166
],
[
8461,
8468
],
[
8583,
8590
],
[
8875,
8882
],
[
8943,
8950
],
[
9008,
9015
],
[
9296,
9303
],
[
9394,
9401
],
[
9492,
9499
],
[
9587,
9594
],
[
9682,
9689
],
[
9777,
9784
],
[
10089,
10096
],
[
10150,
10157
],
[
10380,
10387
],
[
10569,
10576
],
[
14062,
14069
],
[
14185,
14192
],
[
17460,
17467
],
[
17544,
17551
],
[
17880,
17887
],
[
17969,
17976
],
[
18053,
18060
],
[
18141,
18148
],
[
18229,
18236
],
[
18312,
18319
]
],
[
[
54,
60
],
[
91,
97
],
[
12834,
12840
],
[
14593,
14599
],
[
18758,
18764
],
[
18814,
18820
],
[
19623,
19629
],
[
19951,
19957
],
[
20335,
20341
],
[
20553,
20559
],
[
20890,
20896
],
[
22501,
22507
],
[
22691,
22697
],
[
22752,
22758
],
[
23929,
23935
],
[
25532,
25538
],
[
26554,
26560
],
[
27357,
27363
],
[
27405,
27411
],
[
27456,
27462
],
[
14760,
14766
],
[
14833,
14839
],
[
15189,
15195
],
[
15272,
15278
],
[
16869,
16875
],
[
16975,
16981
],
[
17014,
17020
],
[
18508,
18514
],
[
18557,
18563
],
[
18608,
18614
],
[
18659,
18665
],
[
18712,
18718
],
[
20734,
20740
],
[
20835,
20841
],
[
21610,
21616
],
[
23849,
23855
],
[
26404,
26410
]
],
[
[
79,
84
],
[
10354,
10359
]
],
[
[
86,
88
],
[
14612,
14614
],
[
25616,
25618
],
[
25652,
25654
],
[
25680,
25682
],
[
25738,
25740
],
[
25771,
25773
],
[
25805,
25807
],
[
8895,
8897
],
[
8963,
8965
],
[
9028,
9030
],
[
9050,
9052
],
[
9319,
9321
],
[
9417,
9419
],
[
9515,
9517
],
[
9610,
9612
],
[
9705,
9707
],
[
9800,
9802
],
[
9846,
9848
],
[
10182,
10184
],
[
10716,
10718
],
[
10930,
10932
],
[
11091,
11093
],
[
11426,
11428
],
[
11564,
11566
],
[
11638,
11640
],
[
11938,
11940
],
[
12571,
12573
],
[
12730,
12732
],
[
12919,
12921
],
[
13525,
13527
],
[
13556,
13558
],
[
13970,
13972
],
[
14382,
14384
],
[
14459,
14461
],
[
14920,
14922
],
[
15119,
15121
],
[
15245,
15247
],
[
15328,
15330
],
[
15369,
15371
],
[
15392,
15394
],
[
15535,
15537
],
[
15588,
15590
],
[
15640,
15642
],
[
16086,
16088
],
[
17078,
17080
],
[
17155,
17157
],
[
19126,
19128
],
[
19255,
19257
],
[
19288,
19290
],
[
20169,
20171
],
[
20318,
20320
],
[
20532,
20534
],
[
21159,
21161
],
[
21223,
21225
],
[
21571,
21573
],
[
21720,
21722
],
[
21800,
21802
],
[
22105,
22107
],
[
22146,
22148
],
[
23016,
23018
],
[
23817,
23819
],
[
24020,
24022
],
[
24030,
24032
],
[
24381,
24383
],
[
24391,
24393
],
[
24591,
24593
],
[
24651,
24653
],
[
24818,
24820
],
[
25140,
25142
],
[
25190,
25192
],
[
25255,
25257
],
[
25350,
25352
],
[
25456,
25458
],
[
26346,
26348
],
[
27234,
27236
],
[
27302,
27304
],
[
27566,
27568
],
[
27654,
27656
]
],
[
[
128,
132
],
[
21991,
21995
]
],
[
[
140,
156
],
[
12009,
12011
],
[
12167,
12169
],
[
12523,
12525
],
[
12650,
12652
],
[
12951,
12953
],
[
13819,
13821
],
[
14307,
14309
],
[
14491,
14493
],
[
14795,
14797
],
[
14868,
14870
],
[
14991,
14993
],
[
15097,
15099
],
[
15224,
15226
],
[
15307,
15309
],
[
15450,
15452
],
[
15566,
15568
],
[
15680,
15682
],
[
15882,
15884
],
[
16127,
16129
],
[
16647,
16649
],
[
18475,
18477
],
[
19432,
19434
],
[
19511,
19513
],
[
19758,
19760
],
[
19851,
19853
],
[
20107,
20109
],
[
20261,
20263
],
[
20438,
20440
],
[
20476,
20478
],
[
20658,
20660
],
[
20696,
20698
],
[
20796,
20798
],
[
21092,
21094
],
[
21145,
21147
],
[
21454,
21456
],
[
21488,
21490
],
[
21758,
21760
],
[
21874,
21876
],
[
22182,
22184
],
[
22228,
22230
],
[
22544,
22546
],
[
23048,
23050
],
[
23772,
23774
],
[
24069,
24071
],
[
24430,
24432
],
[
24565,
24567
],
[
24673,
24675
],
[
24840,
24842
],
[
25419,
25421
],
[
26259,
26261
],
[
26417,
26419
],
[
27030,
27032
],
[
27082,
27084
],
[
27256,
27258
],
[
27323,
27325
],
[
27552,
27554
],
[
27640,
27642
],
[
27727,
27729
]
],
[
[
189,
212
],
[
16903,
16926
]
],
[
[
214,
223
],
[
539,
548
],
[
842,
851
],
[
1143,
1152
],
[
1443,
1452
],
[
1744,
1753
],
[
2064,
2073
],
[
2328,
2337
],
[
2474,
2483
],
[
2670,
2679
],
[
2799,
2808
],
[
3139,
3148
],
[
3517,
3526
],
[
3860,
3869
],
[
4204,
4213
],
[
4498,
4507
],
[
4704,
4713
],
[
4754,
4763
]
],
[
[
257,
269
],
[
15973,
15985
]
],
[
[
271,
285
],
[
17048,
17062
],
[
17102,
17116
],
[
17179,
17193
]
],
[
[
287,
298
],
[
5135,
5146
],
[
5460,
5471
],
[
5851,
5862
],
[
6180,
6191
],
[
6729,
6740
],
[
7261,
7272
],
[
7500,
7511
],
[
7751,
7762
],
[
8073,
8084
],
[
8299,
8310
],
[
10853,
10864
],
[
11004,
11015
],
[
11172,
11183
],
[
11348,
11359
],
[
11486,
11497
],
[
11853,
11864
],
[
17391,
17402
],
[
17811,
17822
]
],
[
[
332,
336
],
[
8790,
8794
],
[
9174,
9178
],
[
10004,
10008
],
[
10271,
10275
]
],
[
[
338,
353
],
[
11807,
11822
],
[
25045,
25060
],
[
25120,
25135
],
[
25169,
25184
],
[
25225,
25240
],
[
25318,
25333
]
],
[
[
355,
367
],
[
26010,
26022
]
],
[
[
398,
407
],
[
12591,
12600
],
[
12717,
12726
],
[
13176,
13185
],
[
13240,
13249
],
[
13294,
13303
],
[
13383,
13392
],
[
13723,
13732
],
[
13957,
13966
],
[
14369,
14378
],
[
14530,
14539
],
[
14981,
14990
],
[
15087,
15096
],
[
15440,
15449
],
[
15556,
15565
],
[
15712,
15721
],
[
15757,
15766
],
[
15802,
15811
],
[
19554,
19563
],
[
19587,
19596
],
[
19882,
19891
],
[
19917,
19926
],
[
20143,
20152
],
[
20292,
20301
],
[
20514,
20523
],
[
21205,
21214
],
[
22022,
22031
],
[
22452,
22461
],
[
23084,
23093
],
[
24228,
24237
],
[
24500,
24509
],
[
24555,
24564
],
[
24704,
24713
],
[
24743,
24752
],
[
24872,
24881
],
[
24907,
24916
],
[
24940,
24949
],
[
24973,
24982
],
[
27224,
27233
],
[
27292,
27301
],
[
27750,
27759
]
],
[
[
409,
418
],
[
16170,
16179
],
[
16203,
16212
],
[
16258,
16267
],
[
16307,
16316
],
[
16350,
16359
],
[
16387,
16396
],
[
16436,
16445
],
[
16474,
16483
],
[
16513,
16522
],
[
16547,
16556
],
[
16684,
16693
],
[
16720,
16729
],
[
16757,
16766
],
[
16796,
16805
]
],
[
[
425,
438
]
],
[
[
4651,
4689
]
],
[
[
4808,
4827
]
],
[
[
6276,
6295
]
],
[
[
7399,
7423
]
],
[
[
8013,
8048
]
],
[
[
8249,
8274
]
],
[
[
8759,
8768
]
],
[
[
9966,
9982
]
],
[
[
10809,
10828
]
],
[
[
11301,
11331
]
],
[
[
11974,
11997
]
],
[
[
12132,
12155
]
],
[
[
12476,
12493
]
],
[
[
12613,
12638
]
],
[
[
12759,
12769
],
[
12994,
13004
]
],
[
[
12882,
12907
]
],
[
[
13765,
13807
]
],
[
[
14279,
14295
]
],
[
[
14421,
14447
]
],
[
[
14722,
14747
]
],
[
[
15144,
15176
]
],
[
[
15613,
15628
]
],
[
[
15849,
15867
]
],
[
[
15941,
15958
]
],
[
[
16045,
16074
]
],
[
[
16586,
16635
]
],
[
[
16829,
16856
]
],
[
[
16943,
16962
]
],
[
[
17223,
17244
]
],
[
[
18449,
18463
]
],
[
[
18894,
18924
]
],
[
[
19672,
19705
]
],
[
[
20006,
20046
]
],
[
[
20381,
20420
]
],
[
[
20599,
20640
]
],
[
[
21038,
21075
]
],
[
[
21249,
21289
]
],
[
[
21677,
21708
]
],
[
[
22060,
22093
]
],
[
[
22523,
22532
]
],
[
[
22969,
22992
]
],
[
[
23116,
23124
],
[
24121,
24129
],
[
24131,
24139
],
[
24141,
24149
],
[
24151,
24159
]
],
[
[
23682,
23714
]
],
[
[
23950,
23979
]
],
[
[
24260,
24281
]
],
[
[
24524,
24547
]
],
[
[
24613,
24639
]
],
[
[
24784,
24806
]
],
[
[
25010,
25030
]
],
[
[
25386,
25407
]
],
[
[
25955,
25972
]
],
[
[
26213,
26247
]
],
[
[
26616,
26666
]
],
[
[
27205,
27216
]
],
[
[
27508,
27519
]
]
] |
# -*- coding: utf-8 -*-
# Copyright (C) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import math
import os
import sys
top_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir,
os.pardir))
sys.path.insert(0, top_dir)
from zag import engines
from zag.patterns import linear_flow
from zag import task
# INTRO: This shows how to use a task's/atom's ability to take requirements from
# its execute function's default parameters, and how to provide those
# via different methods when needed, influencing those parameters to (in
# this case) calculate the distance between two points in 2D space.
# A 2D point.
Point = collections.namedtuple("Point", "x,y")
def is_near(val, expected, tolerance=0.001):
# Floats don't really provide equality...
if val > (expected + tolerance):
return False
if val < (expected - tolerance):
return False
return True
class DistanceTask(task.Task):
# See: http://en.wikipedia.org/wiki/Distance#Distance_in_Euclidean_space
default_provides = 'distance'
def execute(self, a=Point(0, 0), b=Point(0, 0)):
return math.sqrt(math.pow(b.x - a.x, 2) + math.pow(b.y - a.y, 2))
if __name__ == '__main__':
# For these we rely on the execute() method's points by default being
# at the origin (and we override them with store values when we want) at
# execution time (which then influences what is calculated).
any_distance = linear_flow.Flow("origin").add(DistanceTask())
results = engines.run(any_distance)
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
0.0,
is_near(results['distance'], 0.0)))
results = engines.run(any_distance, store={'a': Point(1, 1)})
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
1.4142,
is_near(results['distance'],
1.4142)))
results = engines.run(any_distance, store={'a': Point(10, 10)})
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
14.14199,
is_near(results['distance'],
14.14199)))
results = engines.run(any_distance,
store={'a': Point(5, 5), 'b': Point(10, 10)})
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
7.07106,
is_near(results['distance'],
7.07106)))
# For this we use the ability to override the optional arguments at task
# creation time so that we don't need to keep sending them in via the
# 'store' argument like in the above (and we fix the new starting point
# 'a' at (10, 10) instead of (0, 0)).
ten_distance = linear_flow.Flow("ten")
ten_distance.add(DistanceTask(inject={'a': Point(10, 10)}))
results = engines.run(ten_distance, store={'b': Point(10, 10)})
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
0.0,
is_near(results['distance'], 0.0)))
results = engines.run(ten_distance)
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
14.14199,
is_near(results['distance'],
14.14199)))
| [
[
[
673,
684
],
[
1314,
1325
]
],
[
[
692,
696
],
[
1792,
1796
],
[
1802,
1806
],
[
1827,
1831
]
],
[
[
704,
706
],
[
729,
731
],
[
745,
747
],
[
758,
760
],
[
824,
826
],
[
874,
876
]
],
[
[
714,
717
],
[
886,
889
]
],
[
[
719,
726
],
[
905,
912
]
],
[
[
931,
938
],
[
2173,
2180
],
[
2424,
2431
],
[
2758,
2765
],
[
3098,
3105
],
[
3862,
3869
],
[
4141,
4148
]
],
[
[
964,
975
],
[
2112,
2123
],
[
3760,
3771
]
],
[
[
992,
996
],
[
1599,
1603
]
],
[
[
1306,
1311
],
[
1748,
1753
],
[
1763,
1768
],
[
2462,
2467
],
[
2796,
2801
],
[
3162,
3167
],
[
3180,
3185
],
[
3831,
3836
],
[
3900,
3905
]
],
[
[
1359,
1366
],
[
2373,
2380
],
[
2653,
2660
],
[
2991,
2998
],
[
3374,
3381
],
[
4090,
4097
],
[
4346,
4353
]
],
[
[
1586,
1598
],
[
2143,
2155
],
[
3805,
3817
]
],
[
[
2097,
2109
],
[
2185,
2197
],
[
2436,
2448
],
[
2770,
2782
],
[
3110,
3122
]
],
[
[
2163,
2170
],
[
2209,
2216
],
[
2261,
2268
],
[
2381,
2388
]
],
[
[
2414,
2421
],
[
2486,
2493
],
[
2538,
2545
],
[
2661,
2668
]
],
[
[
2748,
2755
],
[
2822,
2829
],
[
2874,
2881
],
[
2999,
3006
]
],
[
[
3088,
3095
],
[
3206,
3213
],
[
3258,
3265
],
[
3382,
3389
]
],
[
[
3745,
3757
],
[
3788,
3800
],
[
3874,
3886
],
[
4153,
4165
]
],
[
[
3852,
3859
],
[
3926,
3933
],
[
3978,
3985
],
[
4098,
4105
]
],
[
[
4131,
4138
],
[
4177,
4184
],
[
4229,
4236
],
[
4354,
4361
]
]
] |
"""Test package."""
import shapely.geometry
import simpy
import openclsim.core as core
import openclsim.model as model
from .test_utils import assert_log
def test_test_resource_synchronization():
"""Test resource Synchronization."""
simulation_start = 0
my_env = simpy.Environment(initial_time=simulation_start)
registry = {}
Site = type(
"Site",
(
core.Identifiable,
core.Log,
core.Locatable,
core.HasContainer,
core.HasResource,
),
{},
)
TransportProcessingResource = type(
"TransportProcessingResource",
(
core.Identifiable,
core.Log,
core.ContainerDependentMovable,
core.Processor,
core.HasResource,
core.LoadingFunction,
core.UnloadingFunction,
),
{},
)
location_from_site = shapely.geometry.Point(4.18055556, 52.18664444)
from_site = Site(
env=my_env,
name="Winlocatie",
ID="6dbbbdf4-4589-11e9-a501-b469212bff5d",
geometry=location_from_site,
capacity=10,
level=8,
)
hopper1 = TransportProcessingResource(
env=my_env,
name="Hopper 01",
ID="6dbbbdf6-4589-11e9-95a2-b469212bff5b",
geometry=location_from_site,
loading_rate=1,
unloading_rate=1,
capacity=4,
compute_v=lambda x: 10,
)
hopper2 = TransportProcessingResource(
env=my_env,
name="Hopper 02",
ID="5dbbbdf6-4589-11e9-95a2-b469212bff5b",
geometry=location_from_site,
loading_rate=1,
unloading_rate=1,
capacity=4,
compute_v=lambda x: 10,
)
requested_resources1 = {}
activity1 = model.ShiftAmountActivity(
env=my_env,
name="Transfer1",
ID="6dbbbdf7-4589-11e9-bf3b-b469212bff52",
registry=registry,
processor=hopper1,
origin=from_site,
destination=hopper1,
amount=1,
duration=20,
requested_resources=requested_resources1,
)
seq_activity1 = model.SequentialActivity(
env=my_env,
name="Sequential process1",
ID="6dbbbdf7-4589-11e9-bf3b-b469212bff60",
registry=registry,
sub_processes=[activity1],
requested_resources=requested_resources1,
)
while1 = model.WhileActivity(
env=my_env,
name="while1",
ID="6dbbbdf7-4589-11e9-bf3b-b469212bff5g",
registry=registry,
sub_processes=[seq_activity1],
requested_resources=requested_resources1,
condition_event=[
{
"or": [
{"type": "container", "concept": hopper1, "state": "full"},
{"type": "container", "concept": from_site, "state": "empty"},
]
}
],
)
activity2 = model.ShiftAmountActivity(
env=my_env,
name="Transfer2",
ID="5dbbbdf7-4589-11e9-bf3b-b469212bff52",
registry=registry,
processor=hopper2,
origin=from_site,
destination=hopper2,
amount=1,
duration=20,
)
seq_activity2 = model.SequentialActivity(
env=my_env,
name="Sequential process2",
ID="5dbbbdf7-4589-11e9-bf3b-b469212bff60",
registry=registry,
sub_processes=[activity2],
)
while2 = model.WhileActivity(
env=my_env,
name="while2",
ID="5dbbbdf7-4589-11e9-bf3b-b469212bff5g",
registry=registry,
sub_processes=[seq_activity2],
condition_event=[
{
"or": [
{"type": "container", "concept": hopper2, "state": "full"},
{"type": "container", "concept": from_site, "state": "empty"},
]
}
],
)
model.register_processes([while1, while2])
my_env.run()
assert my_env.now == 160
assert_log(from_site)
assert_log(while1)
assert_log(while2)
| [
[
[
28,
44
],
[
932,
939
]
],
[
[
52,
57
],
[
281,
286
]
],
[
[
66,
88
],
[
404,
408
],
[
435,
439
],
[
457,
461
],
[
485,
489
],
[
516,
520
],
[
664,
668
],
[
695,
699
],
[
717,
721
],
[
761,
765
],
[
789,
793
],
[
819,
823
],
[
853,
857
]
],
[
[
96,
120
],
[
1801,
1806
],
[
2150,
2155
],
[
2415,
2420
],
[
2939,
2944
],
[
3238,
3243
],
[
3452,
3457
],
[
3914,
3919
]
],
[
[
146,
156
],
[
4008,
4018
],
[
4034,
4044
],
[
4057,
4067
]
],
[
[
163,
197
]
]
] |
from confluent_kafka import Producer
import socket
if __name__ == '__main__':
print("Starting Kafka Producer")
producer_config = {'client.id': socket.gethostname(),
'bootstrap.servers': 'localhost:9092'}
print("Creating Producer")
producer = Producer(producer_config)
print("Producing Kafka Message")
for i in range(1, 101):
for j in range(1, 10001):
producer.produce('hello-producer', key=str(j*i), value="Simple Message-" + str(j*i))
producer.poll()
producer.flush()
print("Finished Kafka Producer")
| [
[
[
28,
36
],
[
284,
292
]
],
[
[
44,
50
],
[
153,
159
]
],
[
[
121,
136
],
[
293,
308
]
],
[
[
273,
281
],
[
422,
430
],
[
515,
523
],
[
536,
544
]
],
[
[
356,
357
],
[
467,
468
],
[
503,
504
]
],
[
[
388,
389
],
[
465,
466
],
[
501,
502
]
]
] |
from membase.api.rest_client import RestConnection, RestHelper
import urllib.request, urllib.parse, urllib.error
import json
from remote.remote_util import RemoteMachineShellConnection, RemoteMachineHelper
from newupgradebasetest import NewUpgradeBaseTest
from security.auditmain import audit
import subprocess
import socket
import fileinput
import sys
from subprocess import Popen, PIPE
from .SecretsMasterBase import SecretsMasterBase
from basetestcase import BaseTestCase
import _thread
from testconstants import STANDARD_BUCKET_PORT
from membase.api.rest_client import RestConnection, Bucket, RestHelper
from membase.api.exception import BucketCreationException
from membase.helper.bucket_helper import BucketOperationHelper
from couchbase_helper.documentgenerator import BlobGenerator
class SecretsMgmtTests(BaseTestCase):
def setUp(self):
super(SecretsMgmtTests, self).setUp()
self.secretmgmt_base_obj = SecretsMasterBase(self.master)
self.password = self.input.param('password', 'p@ssword')
enable_audit = self.input.param('audit', None)
if enable_audit:
Audit = audit(host=self.master)
currentState = Audit.getAuditStatus()
self.log.info("Current status of audit on ip - {0} is {1}".format(self.master.ip, currentState))
if not currentState:
self.log.info("Enabling Audit ")
Audit.setAuditEnable('true')
self.sleep(30)
def tearDown(self):
self.log.info("---------------Into Teardown---------------")
for server in self.servers:
self.secretmgmt_base_obj = SecretsMasterBase(server)
self.secretmgmt_base_obj.set_password(server, "")
self.secretmgmt_base_obj.change_config_to_orginal(server, "")
log_dir = (self.secretmgmt_base_obj.get_log_dir(server))[1:-1]
babysitter_file = str(log_dir + "/babysitter.log")
shell = RemoteMachineShellConnection(server)
command = str(" mv " + babysitter_file + " " + log_dir + "/babysitterOLD.log")
shell.execute_command(command=command)
self.print_memcached_ip()
shell.disconnect()
super(SecretsMgmtTests, self).tearDown()
def suite_setUp(self):
self.log.info("---------------Suite Setup---------------")
def suite_tearDown(self):
self.log.info("---------------Suite Teardown---------------")
def print_memcached_ip(self):
shell = RemoteMachineShellConnection(self.master)
o, _ = shell.execute_command("ps aux | grep 'memcached' | awk '{print $2}'")
if o:
mem_pid = o[0]
shell.disconnect()
def test_evn_variable(self):
self.secretmgmt_base_obj.set_password(self.master, self.password)
self.secretmgmt_base_obj.restart_server_with_env(self.master, self.password)
temp_return = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted")
self.assertTrue(temp_return, "Babysitter.log does not contain node initialization code")
def test_multiple_prompt_3times(self):
try:
self.secretmgmt_base_obj.set_password(self.master, self.password)
shell = RemoteMachineShellConnection(self.master)
shell.execute_command("/opt/couchbase/etc/couchbase_init.d stop")
self.secretmgmt_base_obj.start_server_prompt_diff_window(self.master)
self.sleep(10)
cmd = "/opt/couchbase/bin/couchbase-cli master-password -c localhost:8091 -u Administrator -p password --send-password"
# self.secretmgmt_base_obj.incorrect_password(self.master,cmd="/opt/couchbase/bin/cbmaster_password")
temp_result = self.secretmgmt_base_obj.incorrect_password(self.master, cmd=cmd)
self.assertTrue(temp_result, "Issue with passing incorrect password 3 times")
finally:
for server in self.servers:
shell = RemoteMachineShellConnection(server)
if (RemoteMachineHelper(shell).is_process_running('memcached') is None):
print('Process Memcached is not running')
# shell.set_environment_variable("CB_MASTER_PASSWORD", self.password)
shell.execute_command(
"export CB_MASTER_PASSWORD=" + self.password + "; /opt/couchbase/etc/couchbase_init.d start")
def test_multiple_prompt_enter_correct_2retries(self):
try:
self.secretmgmt_base_obj.set_password(self.master, self.password)
shell = RemoteMachineShellConnection(self.master)
shell.execute_command("/opt/couchbase/etc/couchbase_init.d stop")
self.secretmgmt_base_obj.start_server_prompt_diff_window(self.master)
self.sleep(10)
# self.secretmgmt_base_obj.incorrect_password(self.master, cmd="/opt/couchbase/bin/cbmaster_password",
# retries_number=2,input_correct_pass=True,correct_pass=self.password)
cmd = "/opt/couchbase/bin/couchbase-cli master-password -c localhost:8091 -u Administrator -p password --send-password"
temp_result = self.secretmgmt_base_obj.incorrect_password(self.master, cmd=cmd,
retries_number=2, input_correct_pass=True,
correct_pass=self.password)
self.assertTrue(temp_result, "Issue with entering an incorrect password twice and then the correct password")
finally:
for server in self.servers:
shell = RemoteMachineShellConnection(server)
if (RemoteMachineHelper(shell).is_process_running('memcached') is None):
shell.set_environment_variable("CB_MASTER_PASSWORD", self.password)
def test_multiple_prompt_enter_correct_1retries(self):
try:
self.secretmgmt_base_obj.set_password(self.master, self.password)
shell = RemoteMachineShellConnection(self.master)
shell.execute_command("/opt/couchbase/etc/couchbase_init.d stop")
self.secretmgmt_base_obj.start_server_prompt_diff_window(self.master)
self.sleep(10)
# self.secretmgmt_base_obj.incorrect_password(self.master, cmd="/opt/couchbase/bin/cbmaster_password",
# retries_number=1, input_correct_pass=True, correct_pass='temp')
cmd = "/opt/couchbase/bin/couchbase-cli master-password -c localhost:8091 -u Administrator -p password --send-password"
temp_result = self.secretmgmt_base_obj.incorrect_password(self.master, cmd=cmd,
retries_number=1, input_correct_pass=True,
correct_pass=self.password)
self.assertTrue(temp_result, "Issue with entering an incorrect password once and then the correct password")
finally:
for server in self.servers:
shell = RemoteMachineShellConnection(server)
if (RemoteMachineHelper(shell).is_process_running('memcached') is None):
shell.set_environment_variable("CB_MASTER_PASSWORD", self.password)
def test_prompt_enter_correct_password(self):
try:
self.secretmgmt_base_obj.set_password(self.master, self.password)
shell = RemoteMachineShellConnection(self.master)
shell.execute_command("/opt/couchbase/etc/couchbase_init.d stop")
shell.disconnect()
self.secretmgmt_base_obj.start_server_prompt_diff_window(self.master)
self.sleep(10)
# self.secretmgmt_base_obj.incorrect_password(self.master, cmd="/opt/couchbase/bin/cbmaster_password",
# retries_number=1, input_correct_pass=True, correct_pass='temp')
cmd = "/opt/couchbase/bin/couchbase-cli master-password -c localhost:8091 -u Administrator -p password --send-password"
temp_result = self.secretmgmt_base_obj.correct_password_on_prompt(self.master, self.password, cmd=cmd)
self.assertTrue(temp_result, "Issue with passing in correct password on prompt")
finally:
for server in self.servers:
shell = RemoteMachineShellConnection(server)
if (RemoteMachineHelper(shell).is_process_running('memcached') is None):
shell.set_environment_variable("CB_MASTER_PASSWORD", self.password)
def test_env_variable_change_pass(self):
new_pass = self.input.param("new_password", "new_p@ssw0rd")
self.secretmgmt_base_obj.set_password(self.master, self.password)
self.secretmgmt_base_obj.restart_server_with_env(self.master, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
self.secretmgmt_base_obj.set_password(self.master, new_pass)
self.secretmgmt_base_obj.restart_server_with_env(self.master, new_pass)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
def generate_pass(self):
type = self.input.param("type", 'char')
pass_length = self.input.param('pass_length', 10)
num_pass = self.input.param('num_pass', 10)
if type in ('char', 'int', 'ext'):
pass_list = self.secretmgmt_base_obj.generate_password_simple(type, pass_length, num_pass)
else:
pass_list = self.secretmgmt_base_obj.generate_password_dual(type, pass_length, num_pass)
for item in pass_list:
item = item.decode('ISO-8859-1').strip()
self.secretmgmt_base_obj.set_password(self.master, item)
self.secretmgmt_base_obj.restart_server_with_env(self.master, item)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log",
"Booted. Waiting for shutdown request")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
def generate_pass_file(self):
with open("./pytests/security/password_list.txt") as f:
for item in f:
item = item.decode('ISO-8859-1').strip()
self.secretmgmt_base_obj.set_password(self.master, item)
self.secretmgmt_base_obj.restart_server_with_env(self.master, item)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log",
"Booted. Waiting for shutdown request")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
def test_cluster_rebalance_in_env_var(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
temp_result = self.cluster.rebalance(self.servers, servers_in, [])
self.assertTrue(temp_result, "Rebalance-in did not complete with password setup node")
def test_cluster_rebalance_out(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
self.cluster.rebalance(self.servers, servers_in, [])
servers_out = self.servers[2:]
temp_result = self.cluster.rebalance(self.servers, [], servers_out)
self.assertTrue(temp_result, 'Rebalance-out did not complete with password node setup')
def test_cluster_rebalance_in_prompt(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password, startup_type='prompt')
temp_result = self.cluster.rebalance(self.servers, servers_in, [])
self.assertTrue(temp_result, 'Rebalance-in did not complete with password node setup')
def test_cluster_rebalance_out_prompt(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password, startup_type='prompt')
self.cluster.rebalance(self.servers, servers_in, [])
servers_out = self.servers[2:]
temp_result = self.cluster.rebalance(self.servers, [], servers_out)
self.assertTrue(temp_result, 'Rebalance-out did not complete with password node setup')
def test_cluster_rebalance_in_diff_modes(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
extra_pass = self.input.param('extra_pass', 'p@ssw0rd1')
servers_in = self.servers[1:]
server_env_var = servers_in[0]
server_prompt = servers_in[1]
server_plain = servers_in[2]
self.secretmgmt_base_obj.setup_pass_node(server_env_var, self.password)
self.secretmgmt_base_obj.setup_pass_node(server_prompt, extra_pass, startup_type='prompt')
self.secretmgmt_base_obj.setup_pass_node(server_plain, startup_type='simple')
temp_result = self.cluster.rebalance(self.servers, servers_in, [])
self.assertTrue(temp_result, 'Rebalance-in did not complete with password node setup')
def test_cluster_rebalance_out_diff_modes(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
extra_pass = self.input.param('extra_pass', 'p@ssw0rd1')
servers_in = self.servers[1:]
server_env_var = servers_in[0]
server_prompt = servers_in[1]
server_plain = servers_in[2]
self.secretmgmt_base_obj.setup_pass_node(server_env_var, self.password)
self.secretmgmt_base_obj.setup_pass_node(server_prompt, extra_pass, startup_type='prompt')
self.secretmgmt_base_obj.setup_pass_node(server_plain, startup_type='simple')
self.cluster.rebalance(self.servers, servers_in, [])
servers_out = self.servers[2:]
temp_result = self.cluster.rebalance(self.servers, [], servers_out)
self.assertTrue(temp_result, 'Rebalance-out did not complete with password node setup')
# services_in=kv-index-n1ql,nodes_init=1,nodes_in=3
def test_cluster_rebalance_in_env_var_services(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
self.find_nodes_in_list()
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], self.nodes_in_list, [],
services=self.services_in)
self.assertTrue(rebalance.result(), "Issue with Reablance in with different services")
# services_in=kv-index-n1ql,nodes_init=1,nodes_in=3
def test_cluster_rebalance_in_diff_type_var_services(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
self.find_nodes_in_list()
servers_in = self.servers[1:]
server_env_var = servers_in[0]
server_prompt = servers_in[1]
server_plain = servers_in[2]
self.secretmgmt_base_obj.setup_pass_node(server_env_var, self.password)
self.secretmgmt_base_obj.setup_pass_node(server_prompt, self.password, startup_type='prompt')
self.secretmgmt_base_obj.setup_pass_node(server_plain, startup_type='simple')
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], self.nodes_in_list, [],
services=self.services_in)
self.assertTrue(rebalance.result(), "Rebalance in with different servers")
# services_in=kv-index-n1ql,nodes_init=1,nodes_in=3
def test_cluster_rebalance_out_env_var_services(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
self.find_nodes_in_list()
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], self.nodes_in_list, [],
services=self.services_in)
print(("result of rebalance is {0}".format(rebalance.result())))
servers_out = self.servers[2:]
rebalance = self.cluster.async_rebalance(self.servers, [], servers_out)
print(("result of rebalance is {0}".format(rebalance.result())))
self.assertTrue(rebalance.result(), "Rebalance out with different service")
# services_in=kv-index-n1ql,nodes_init=1,nodes_in=3
def test_cluster_rebalance_out_diff_type_var_services(self):
extra_pass = self.input.param("extra_pass", 'p@ssw0rd01')
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
self.find_nodes_in_list()
servers_in = self.servers[1:]
server_env_var = servers_in[0]
server_prompt = servers_in[1]
server_plain = servers_in[2]
self.secretmgmt_base_obj.setup_pass_node(server_env_var, self.password)
self.secretmgmt_base_obj.setup_pass_node(server_prompt, extra_pass, startup_type='prompt')
self.secretmgmt_base_obj.setup_pass_node(server_plain, startup_type='simple')
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], self.nodes_in_list, [],
services=self.services_in)
rebalance.result()
servers_out = self.servers[1:]
rebalance = self.cluster.async_rebalance(self.servers, [], servers_out)
print((rebalance.result()))
self.assertTrue(rebalance.result(), "Rebalance in and out with different servers")
# services_init = kv - kv:n1ql - index - kv:index, nodes_init = 4, nodes_out = 1, nodes_out_dist = kv:1, graceful = False
# services_init = kv - kv:n1ql - index - kv:index, nodes_init = 4, nodes_out = 1, nodes_out_dist = kv:1, graceful = False,recoveryType=delta
def test_failover_add_back(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
try:
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
self.sleep(30)
rest = RestConnection(self.master)
self.graceful = self.input.param('graceful', False)
recoveryType = self.input.param("recoveryType", "full")
self.find_nodes_in_list()
self.generate_map_nodes_out_dist()
servr_out = self.nodes_out_list
nodes_all = rest.node_statuses()
failover_task = self.cluster.async_failover([self.master],
failover_nodes=servr_out, graceful=self.graceful)
failover_task.result()
nodes_all = rest.node_statuses()
nodes = []
if servr_out[0].ip == "127.0.0.1":
for failover_node in servr_out:
nodes.extend([node for node in nodes_all
if (str(node.port) == failover_node.port)])
else:
for failover_node in servr_out:
nodes.extend([node for node in nodes_all
if node.ip == failover_node.ip])
for node in nodes:
self.log.info(node)
rest.add_back_node(node.id)
rest.set_recovery_type(otpNode=node.id, recoveryType=recoveryType)
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], [], [])
self.assertTrue(rebalance.result(), "Failover with different servers")
except Exception as ex:
raise
# services_init=kv-kv-index-index:n1ql,nodes_init=4,nodes_out=1,nodes_out_dist=kv:1,graceful=True
def test_failover(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
try:
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, 'temp')
self.sleep(30)
self.find_nodes_in_list()
self.generate_map_nodes_out_dist()
servr_out = self.nodes_out_list
print(servr_out)
self.graceful = self.input.param('graceful', False)
failover_task = self.cluster.async_failover([self.master],
failover_nodes=servr_out, graceful=self.graceful)
failover_task.result()
self.log.info("Rebalance first time")
rebalance = self.cluster.rebalance(self.servers[:self.nodes_init], [], [])
self.log.info("Rebalance Second time")
rebalance = self.cluster.rebalance(self.servers[:self.nodes_init], [], [])
except Exception as ex:
raise
# services_init=kv-kv-index-index:n1ql,nodes_init=4,targetProcess=memcached
# services_init=kv-kv-index-index:n1ql,nodes_init=4,targetProcess=babysitter
def kill_process(self):
self.targetProcess = self.input.param("targetProcess", 'memcached')
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
for servers in self.servers:
remote = RemoteMachineShellConnection(servers)
if self.targetProcess == "memcached":
remote.kill_memcached()
else:
remote.terminate_process(process_name=self.targetProcess)
for servers in self.servers:
self.secretmgmt_base_obj.restart_server_with_env(servers, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log",
"Booted. Waiting for shutdown request")
self.assertTrue(temp_result, "Issue with server restart after killing of process")
def restart_server(self):
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
for servers in self.servers:
self.secretmgmt_base_obj.restart_server_with_env(servers, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log",
"Booted. Waiting for shutdown request")
self.assertTrue(temp_result, "Issue with server restart of server")
# services_init=kv-kv-index-index:n1ql,nodes_init=4,default_bucket=False,bucket_type=sasl
# services_init=kv-kv-index-index:n1ql,nodes_init=4,default_bucket=False,bucket_type=standard
# services_init=kv-kv-index-index:n1ql,nodes_init=4,default_bucket=False,bucket_type=standard,password=a@cd#efgh@
# services_init=kv-kv-index-index:n1ql,nodes_init=4,default_bucket=False,bucket_type=standard,password=a@cd#efgh@
def test_bucket_create_password(self, bucket_name='secretsbucket', num_replicas=1, bucket_size=100):
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
bucket_type = self.input.param("bucket_type", 'couchbase')
tasks = []
if bucket_type == 'couchbase':
# self.cluster.create_sasl_bucket(self.master, bucket_name, self.password, num_replicas)
rest = RestConnection(self.master)
rest.create_bucket(bucket_name, ramQuotaMB=100)
elif bucket_type == 'standard':
self.cluster.create_standard_bucket(self.master, bucket_name, STANDARD_BUCKET_PORT + 1,
bucket_size)
elif bucket_type == "memcached":
tasks.append(
self.cluster.async_create_memcached_bucket(self.master, bucket_name, STANDARD_BUCKET_PORT + 1,
bucket_size))
for task in tasks:
self.assertTrue(task.result(), "Issue with bucket creation")
else:
self.log.error('Bucket type not specified')
return
self.assertTrue(BucketOperationHelper.wait_for_bucket_creation(bucket_name, RestConnection(self.master)),
                        msg='failed to start up bucket with name "{0}"'.format(bucket_name))
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
install_path = self.secretmgmt_base_obj._get_install_path(self.master)
temp_result = self.secretmgmt_base_obj.check_config_files(self.master, install_path, '/config/config.dat',
self.password)
self.assertTrue(temp_result, "Password found in config.dat")
temp_result = self.secretmgmt_base_obj.check_config_files(self.master, install_path, 'isasl.pw', self.password)
self.assertTrue(temp_result, "Password found in isasl.pw")
def test_bucket_edit_password(self, bucket_name='secretsbucket', num_replicas=1, bucket_size=100):
updated_pass = "p@ssw0rd_updated"
rest = RestConnection(self.master)
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
bucket_type = self.input.param("bucket_type", 'standard')
tasks = []
if bucket_type == 'sasl':
self.cluster.create_sasl_bucket(self.master, bucket_name, self.password, num_replicas, bucket_size)
self.sleep(10)
rest.change_bucket_props(bucket_name, saslPassword=updated_pass)
else:
self.log.error('Bucket type not specified')
return
self.assertTrue(BucketOperationHelper.wait_for_bucket_creation(bucket_name, RestConnection(self.master)),
                        msg='failed to start up bucket with name "{0}"'.format(bucket_name))
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
install_path = self.secretmgmt_base_obj._get_install_path(self.master)
temp_result = self.secretmgmt_base_obj.check_config_files(self.master, install_path, '/config/config.dat',
updated_pass)
self.assertTrue(temp_result, "Password found in config.dat")
temp_result = self.secretmgmt_base_obj.check_config_files(self.master, install_path, 'isasl.pw', updated_pass)
self.assertTrue(temp_result, "Password found in isasl.pw")
def test_cli_setting(self):
temp_result = self.secretmgmt_base_obj.execute_cli(self.master, new_password=self.password)
self.assertTrue(temp_result, "Output of the command is not correct")
self.secretmgmt_base_obj.restart_server_with_env(self.master, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted. Waiting for shutdown request")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
def test_cbcollect(self):
rest = RestConnection(self.master)
bucket_name = 'cbcollectbucket'
num_replicas = 1
bucket_size = 100
# self.cluster.create_sasl_bucket(self.master, bucket_name, self.password, num_replicas, bucket_size)
rest.create_bucket(bucket_name, ramQuotaMB=100)
result = self.secretmgmt_base_obj.generate_cb_collect(self.master, "cbcollect.zip", self.password)
self.assertTrue(result, "Bucket password appears in the cbcollect info")
def rotate_data_key(self):
temp_result = self.secretmgmt_base_obj.read_ns_config(self.master)
self.assertTrue(temp_result, "Config.dat is not refereshed after data key")
def cli_rotate_key(self):
temp_result = self.secretmgmt_base_obj.execute_cli_rotate_key(self.master)
self.assertTrue(temp_result, "Issue with rotate key on cli side")
def audit_change_password(self):
self.secretmgmt_base_obj.set_password(self.master, self.password)
Audit = audit(eventID='8233', host=self.master)
expectedResults = {"real_userid:source": "ns_server", "real_userid:user": "Administrator",
"ip": self.ipAddress, "port": 123456}
        fieldVerification, valueVerification = Audit.validateEvents(expectedResults)
self.assertTrue(fieldVerification, "One of the fields is not matching")
self.assertTrue(valueVerification, "Values for one of the fields is not matching")
    def audit_rotate_data_key(self):
self.secretmgmt_base_obj.execute_cli_rotate_key(self.master)
Audit = audit(eventID='8234', host=self.master)
expectedResults = {"real_userid:source": "ns_server", "real_userid:user": "Administrator",
"ip": self.ipAddress, "port": 123456}
        fieldVerification, valueVerification = Audit.validateEvents(expectedResults)
self.assertTrue(fieldVerification, "One of the fields is not matching")
self.assertTrue(valueVerification, "Values for one of the fields is not matching")
class SecretsMgmtUpgrade(NewUpgradeBaseTest):
def setUp(self):
super(SecretsMgmtUpgrade, self).setUp()
self.initial_version = self.input.param("initial_version", '4.1.0-5005')
self.upgrade_version = self.input.param("upgrade_version", "4.6.0-3467")
self.secretmgmt_base_obj = SecretsMasterBase(self.master)
self.password = self.input.param('password', 'password')
def tearDown(self):
super(SecretsMgmtUpgrade, self).tearDown()
def upgrade_all_nodes(self):
servers_in = self.servers[1:]
self._install(self.servers)
self.cluster.rebalance(self.servers, servers_in, [])
upgrade_threads = self._async_update(upgrade_version=self.upgrade_version, servers=self.servers)
for threads in upgrade_threads:
threads.join()
for server in self.servers:
self.secretmgmt_base_obj.setup_pass_node(server, self.password)
self.secretmgmt_base_obj.restart_server_with_env(self.master, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
for server in self.servers:
rest = RestConnection(server)
temp = rest.cluster_status()
self.log.info("Initial status of {0} cluster is {1}".format(server.ip, temp['nodes'][0]['status']))
while (temp['nodes'][0]['status'] == 'warmup'):
self.log.info("Waiting for cluster to become healthy")
self.sleep(5)
temp = rest.cluster_status()
self.log.info("current status of {0} is {1}".format(server.ip, temp['nodes'][0]['status']))
def upgrade_all_nodes_post_463(self):
servers_in = self.servers[1:]
self._install(self.servers)
self.cluster.rebalance(self.servers, servers_in, [])
for server in self.servers:
self.secretmgmt_base_obj.setup_pass_node(server, self.password)
self.secretmgmt_base_obj.restart_server_with_env(self.master, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
upgrade_threads = self._async_update(upgrade_version=self.upgrade_version, servers=self.servers)
for threads in upgrade_threads:
threads.join()
for server in self.servers:
rest = RestConnection(server)
temp = rest.cluster_status()
self.log.info("Initial status of {0} cluster is {1}".format(server.ip, temp['nodes'][0]['status']))
while (temp['nodes'][0]['status'] == 'warmup'):
self.log.info("Waiting for cluster to become healthy")
self.sleep(5)
temp = rest.cluster_status()
self.log.info("current status of {0} is {1}".format(server.ip, temp['nodes'][0]['status']))
def upgrade_half_nodes(self):
serv_upgrade = self.servers[2:4]
servers_in = self.servers[1:]
self._install(self.servers)
self.cluster.rebalance(self.servers, servers_in, [])
upgrade_threads = self._async_update(upgrade_version=self.upgrade_version, servers=serv_upgrade)
for threads in upgrade_threads:
threads.join()
for server in serv_upgrade:
rest = RestConnection(server)
temp = rest.cluster_status()
self.log.info("Initial status of {0} cluster is {1}".format(server.ip, temp['nodes'][0]['status']))
while (temp['nodes'][0]['status'] == 'warmup'):
self.log.info("Waiting for cluster to become healthy")
self.sleep(5)
temp = rest.cluster_status()
self.log.info("current status of {0} is {1}".format(server.ip, temp['nodes'][0]['status']))
| [
[
[
36,
50
]
],
[
[
52,
62
]
],
[
[
70,
84
]
],
[
[
86,
98
]
],
[
[
100,
112
]
],
[
[
120,
124
]
],
[
[
156,
184
],
[
1959,
1987
],
[
2503,
2531
],
[
3249,
3277
],
[
3987,
4015
],
[
4597,
4625
],
[
5696,
5724
],
[
6081,
6109
],
[
7175,
7203
],
[
7551,
7579
],
[
8468,
8496
],
[
23267,
23295
]
],
[
[
186,
205
],
[
4044,
4063
],
[
5753,
5772
],
[
7232,
7251
],
[
8525,
8544
]
],
[
[
237,
255
],
[
31277,
31295
]
],
[
[
287,
292
],
[
1129,
1134
],
[
30197,
30202
],
[
30785,
30790
]
],
[
[
300,
310
]
],
[
[
318,
324
]
],
[
[
332,
341
]
],
[
[
349,
352
]
],
[
[
376,
381
]
],
[
[
383,
387
]
],
[
[
419,
436
],
[
933,
950
],
[
1639,
1656
],
[
31565,
31582
]
],
[
[
462,
474
],
[
815,
827
]
],
[
[
482,
489
]
],
[
[
516,
536
],
[
25560,
25580
],
[
25799,
25819
]
],
[
[
573,
587
],
[
20145,
20159
],
[
25358,
25372
],
[
26179,
26193
],
[
27168,
27182
],
[
27818,
27832
],
[
29217,
29231
],
[
32547,
32561
],
[
33873,
33887
],
[
34801,
34815
]
],
[
[
589,
595
]
],
[
[
597,
607
]
],
[
[
642,
665
]
],
[
[
707,
728
],
[
26119,
26140
],
[
27758,
27779
]
],
[
[
776,
789
],
[
11225,
11238
],
[
11763,
11776
],
[
12409,
12422
],
[
12977,
12990
],
[
13650,
13663
],
[
14531,
14544
],
[
15575,
15588
],
[
16331,
16344
],
[
17340,
17353
],
[
18417,
18430
],
[
19808,
19821
],
[
21758,
21771
],
[
26320,
26333
],
[
27959,
27972
]
],
[
[
798,
814
],
[
866,
882
],
[
2221,
2237
]
],
[
[
31258,
31276
],
[
31334,
31352
],
[
31700,
31718
]
]
] |
from django.conf.urls import url
from test_app.views.home import Home
from test_app.views.ajax import Ajax
app_name = "test_app"
urlpatterns = [
url(regex=r"^$", view=Home, name="home"),
url(regex=r"^ajax$", view=Ajax, name="ajax"),
]
| [
[
[
29,
32
],
[
152,
155
],
[
198,
201
]
],
[
[
66,
70
],
[
174,
178
]
],
[
[
103,
107
],
[
224,
228
]
],
[
[
109,
117
]
],
[
[
132,
143
]
]
] |
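A short sketch of wiring the app URLConf from the row above into a project (illustrative only; the "^test/" prefix and project urls.py are assumptions, not part of the row):
# project urls.py sketch -- mounts the test_app URLConf shown above under /test/
from django.conf.urls import include, url

urlpatterns = [
    url(r"^test/", include("test_app.urls")),  # namespace "test_app" comes from app_name
]

# Named routes can then be reversed through the app namespace:
# reverse("test_app:home") -> "/test/"
# reverse("test_app:ajax") -> "/test/ajax"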
import arrow
def __mask_day(date_str):
return date_str[:8] + "**"
def __mask_month(date_str):
return date_str[:5] + "**" + date_str[7:]
def encrypt_day(value_, params=None):
date = arrow.get(value_)
date_str = date.format('YYYY-MM-DD')
return __mask_day(date_str)
def encrypt_month(value_, params=None):
date = arrow.get(value_)
date_str = date.format('YYYY-MM-DD')
return __mask_month(date_str)
def encrypt_month_day(value_, params=None):
date = arrow.get(value_)
date_str = date.format('YYYY-MM-DD')
return __mask_day(__mask_month(date_str))
| [
[
[
7,
12
],
[
199,
204
],
[
343,
348
],
[
493,
498
]
],
[
[
19,
29
],
[
269,
279
],
[
563,
573
]
],
[
[
78,
90
],
[
413,
425
],
[
574,
586
]
],
[
[
154,
165
]
],
[
[
296,
309
]
],
[
[
442,
459
]
]
] |
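A quick usage sketch for the date-masking helpers in the row above (values are illustrative; the calls assume they run in the same module where the helpers are defined):
# Illustrative calls of the helpers defined above.
print(encrypt_day("2021-09-07"))        # -> "2021-09-**"
print(encrypt_month("2021-09-07"))      # -> "2021-**-07"
print(encrypt_month_day("2021-09-07"))  # -> "2021-**-**"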
import discord
import config
import requests
client = discord.Client()
@client.event
async def on_ready():
for guild_id in client.guilds:
if guild_id.name == config.DISCORD_GUILD_NAME:
break
print(
f'{client.user} is connected to {guild_id.name}(id: {guild_id.id})'
)
@client.event
async def on_message(message):
if message.author == client.user:
return
wordbank = ['cat', 'puppy', 'bunny', 'giraffe', 'poop']
if message.content == 'pycascade':
response = 'Hello everyone! Welcome and have a great time!'
await message.channel.send(response)
elif message.content in wordbank:
await message.channel.send("please don't use bad words")
elif 'pokemon' in message.content:
# input: pokemon pikachu
pokemon = message.content.split()[1]
req = requests.get(f"https://getpokemonweakness.azurewebsites.net/api/getweakness?pokemon={pokemon}")
await message.channel.send(req.content)
client.run(config.DISCORD_BOT_TOKEN) | [
[
[
7,
14
],
[
55,
62
]
],
[
[
22,
28
],
[
1023,
1029
],
[
172,
178
]
],
[
[
36,
44
],
[
868,
876
]
],
[
[
46,
52
],
[
74,
80
],
[
324,
330
],
[
1012,
1018
],
[
129,
135
],
[
247,
253
],
[
393,
399
]
],
[
[
87,
321
]
],
[
[
337,
1011
]
]
] |
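The bot module in the row above imports a local `config` module that is not shown; a minimal sketch of what it would need to define (values are placeholders, not real credentials):
# config.py sketch -- only the two names the bot module reads.
DISCORD_GUILD_NAME = "my-guild-name"  # placeholder; must match the target guild's name
DISCORD_BOT_TOKEN = "your-bot-token"  # placeholder; token from the Discord developer portal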
# Generated by Django 3.1.13 on 2021-09-07 16:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Role',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, default=False, max_length=200)),
],
),
migrations.AddField(
model_name='user',
name='role',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='users.role'),
),
]
| [
[
[
72,
82
],
[
142,
152
],
[
256,
266
],
[
562,
572
]
],
[
[
84,
90
],
[
349,
355
],
[
466,
472
],
[
657,
663
]
],
[
[
98,
123
],
[
708,
714
]
],
[
[
132,
141
]
]
] |
from typing import Any, Dict, Optional, Union, cast
import httpx
from ...client import Client
from ...models.file_conversion_with_output import FileConversionWithOutput
from ...models.error import Error
from ...models.file_conversion_output_format import FileConversionOutputFormat
from ...models.file_conversion_source_format import FileConversionSourceFormat
from ...types import Response
def _get_kwargs(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Dict[str, Any]:
url = "{}/file/conversion/{src_format}/{output_format}".format(client.base_url, output_format=output_format, src_format=src_format)
headers: Dict[str, Any] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
return {
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
"content": body,
}
def _parse_response(*, response: httpx.Response) -> Optional[Union[Any, FileConversionWithOutput, Error]]:
if response.status_code == 201:
response_201 = FileConversionWithOutput.from_dict(response.json())
return response_201
if response.status_code == 400:
response_4XX = Error.from_dict(response.json())
return response_4XX
if response.status_code == 500:
response_5XX = Error.from_dict(response.json())
return response_5XX
return None
def _build_response(*, response: httpx.Response) -> Response[Union[Any, FileConversionWithOutput, Error]]:
return Response(
status_code=response.status_code,
content=response.content,
headers=response.headers,
parsed=_parse_response(response=response),
)
def sync_detailed(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Response[Union[Any, FileConversionWithOutput, Error]]:
kwargs = _get_kwargs(
output_format=output_format,
src_format=src_format,
body=body,
client=client,
)
response = httpx.post(
verify=client.verify_ssl,
**kwargs,
)
return _build_response(response=response)
def sync(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Optional[Union[Any, FileConversionWithOutput, Error]]:
""" Convert a CAD file from one format to another. If the file being converted is larger than 30MB, it will be performed asynchronously.
If the conversion is performed synchronously, the contents of the converted file (`output`) will be returned as a base64 encoded string.
If the conversion is performed asynchronously, the `id` of the conversion will be returned. You can use the `id` returned from the request to get status information about the async conversion from the `/file/conversions/{id}` endpoint. """
return sync_detailed(
output_format=output_format,
src_format=src_format,
body=body,
client=client,
).parsed
async def asyncio_detailed(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Response[Union[Any, FileConversionWithOutput, Error]]:
kwargs = _get_kwargs(
output_format=output_format,
src_format=src_format,
body=body,
client=client,
)
async with httpx.AsyncClient(verify=client.verify_ssl) as _client:
response = await _client.post(**kwargs)
return _build_response(response=response)
async def asyncio(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Optional[Union[Any, FileConversionWithOutput, Error]]:
""" Convert a CAD file from one format to another. If the file being converted is larger than 30MB, it will be performed asynchronously.
If the conversion is performed synchronously, the contents of the converted file (`output`) will be returned as a base64 encoded string.
If the conversion is performed asynchronously, the `id` of the conversion will be returned. You can use the `id` returned from the request to get status information about the async conversion from the `/file/conversions/{id}` endpoint. """
return (
await asyncio_detailed(
output_format=output_format,
src_format=src_format,
body=body,
client=client,
)
).parsed
| [
[
[
19,
22
],
[
546,
549
],
[
706,
709
],
[
754,
757
],
[
977,
980
],
[
1433,
1436
],
[
1792,
1795
],
[
2208,
2211
],
[
3053,
3056
],
[
3521,
3524
]
],
[
[
24,
28
],
[
536,
540
],
[
696,
700
],
[
744,
748
]
],
[
[
30,
38
],
[
962,
970
],
[
2193,
2201
],
[
3506,
3514
]
],
[
[
40,
45
],
[
971,
976
],
[
1427,
1432
],
[
1786,
1791
],
[
2202,
2207
],
[
3047,
3052
],
[
3515,
3520
]
],
[
[
47,
51
]
],
[
[
60,
65
],
[
943,
948
],
[
1399,
1404
],
[
1957,
1962
],
[
3218,
3223
]
],
[
[
89,
95
],
[
523,
529
],
[
1764,
1770
],
[
2180,
2186
],
[
3025,
3031
],
[
3493,
3499
]
],
[
[
146,
170
],
[
982,
1006
],
[
1067,
1091
],
[
1438,
1462
],
[
1797,
1821
],
[
2213,
2237
],
[
3058,
3082
],
[
3526,
3550
]
],
[
[
199,
204
],
[
1008,
1013
],
[
1191,
1196
],
[
1296,
1301
],
[
1464,
1469
],
[
1823,
1828
],
[
2239,
2244
],
[
3084,
3089
],
[
3552,
3557
]
],
[
[
257,
283
],
[
427,
453
],
[
1668,
1694
],
[
2084,
2110
],
[
2929,
2955
],
[
3397,
3423
]
],
[
[
336,
362
],
[
468,
494
],
[
1709,
1735
],
[
2125,
2151
],
[
2970,
2996
],
[
3438,
3464
]
],
[
[
384,
392
],
[
1418,
1426
],
[
1481,
1489
],
[
1777,
1785
],
[
3038,
3046
]
],
[
[
398,
409
],
[
1842,
1853
],
[
3103,
3114
]
],
[
[
914,
929
],
[
1592,
1607
]
],
[
[
1370,
1385
],
[
2021,
2036
],
[
3325,
3340
]
],
[
[
1637,
1650
],
[
2772,
2785
]
],
[
[
2062,
2066
]
],
[
[
2885,
3359
],
[
4095,
4111
]
],
[
[
3362,
4216
]
]
] |
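A hedged usage sketch for the generated `sync` helper in the row above. The keyword arguments mirror its signature; how `Client` is constructed and the exact enum member names are assumptions, not taken from this file:
# Hypothetical driver for sync() above; Client(...) arguments and enum members are assumptions.
client = Client(base_url="https://api.example.com", token="API-TOKEN")
result = sync(
    output_format=FileConversionOutputFormat.STL,   # assumed member name
    src_format=FileConversionSourceFormat.OBJ,      # assumed member name
    body=open("model.obj", "rb").read(),
    client=client,
)
# Per the docstring: small files return the converted output inline (base64-encoded),
# while larger files return an id that can be polled via /file/conversions/{id}.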
# Generated by Django 3.0.5 on 2020-04-14 19:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0006_auto_20200414_2235'),
]
operations = [
migrations.AddField(
model_name='workshop',
name='name',
field=models.CharField(default='kkkk', max_length=100, verbose_name='Название'),
),
]
| [
[
[
71,
81
],
[
108,
118
],
[
237,
247
]
],
[
[
83,
89
],
[
336,
342
]
],
[
[
98,
107
]
]
] |
import gettext
import os
import re
from datetime import datetime, timedelta
from importlib import import_module
import pytz
from django import forms
from django.conf import settings
from django.contrib import admin
from django.contrib.admin import widgets
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.auth.models import User
from django.core.files.storage import default_storage
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db.models import (
CharField, DateField, DateTimeField, ManyToManyField, UUIDField,
)
from django.test import SimpleTestCase, TestCase, override_settings
from django.urls import reverse
from django.utils import translation
from .models import (
Advisor, Album, Band, Bee, Car, Company, Event, Honeycomb, Individual,
Inventory, Member, MyFileField, Profile, School, Student,
UnsafeLimitChoicesTo, VideoStream,
)
from .widgetadmin import site as widget_admin_site
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email=None)
cls.u2 = User.objects.create_user(username='testser', password='secret')
Car.objects.create(owner=cls.superuser, make='Volkswagen', model='Passat')
Car.objects.create(owner=cls.u2, make='BMW', model='M3')
class AdminFormfieldForDBFieldTests(SimpleTestCase):
"""
Tests for correct behavior of ModelAdmin.formfield_for_dbfield
"""
def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides):
"""
Helper to call formfield_for_dbfield for a given model and field name
and verify that the returned formfield is appropriate.
"""
# Override any settings on the model admin
class MyModelAdmin(admin.ModelAdmin):
pass
for k in admin_overrides:
setattr(MyModelAdmin, k, admin_overrides[k])
# Construct the admin, and ask it for a formfield
ma = MyModelAdmin(model, admin.site)
ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None)
# "unwrap" the widget wrapper, if needed
if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper):
widget = ff.widget.widget
else:
widget = ff.widget
self.assertIsInstance(widget, widgetclass)
# Return the formfield so that other tests can continue
return ff
def test_DateField(self):
self.assertFormfield(Event, 'start_date', widgets.AdminDateWidget)
def test_DateTimeField(self):
self.assertFormfield(Member, 'birthdate', widgets.AdminSplitDateTime)
def test_TimeField(self):
self.assertFormfield(Event, 'start_time', widgets.AdminTimeWidget)
def test_TextField(self):
self.assertFormfield(Event, 'description', widgets.AdminTextareaWidget)
def test_URLField(self):
self.assertFormfield(Event, 'link', widgets.AdminURLFieldWidget)
def test_IntegerField(self):
self.assertFormfield(Event, 'min_age', widgets.AdminIntegerFieldWidget)
def test_CharField(self):
self.assertFormfield(Member, 'name', widgets.AdminTextInputWidget)
def test_EmailField(self):
self.assertFormfield(Member, 'email', widgets.AdminEmailInputWidget)
def test_FileField(self):
self.assertFormfield(Album, 'cover_art', widgets.AdminFileWidget)
def test_ForeignKey(self):
self.assertFormfield(Event, 'main_band', forms.Select)
def test_raw_id_ForeignKey(self):
self.assertFormfield(Event, 'main_band', widgets.ForeignKeyRawIdWidget,
raw_id_fields=['main_band'])
def test_radio_fields_ForeignKey(self):
ff = self.assertFormfield(Event, 'main_band', widgets.AdminRadioSelect,
radio_fields={'main_band': admin.VERTICAL})
self.assertIsNone(ff.empty_label)
def test_many_to_many(self):
self.assertFormfield(Band, 'members', forms.SelectMultiple)
def test_raw_id_many_to_many(self):
self.assertFormfield(Band, 'members', widgets.ManyToManyRawIdWidget,
raw_id_fields=['members'])
def test_filtered_many_to_many(self):
self.assertFormfield(Band, 'members', widgets.FilteredSelectMultiple,
filter_vertical=['members'])
def test_formfield_overrides(self):
self.assertFormfield(Event, 'start_date', forms.TextInput,
formfield_overrides={DateField: {'widget': forms.TextInput}})
def test_formfield_overrides_widget_instances(self):
"""
Widget instances in formfield_overrides are not shared between
different fields. (#19423)
"""
class BandAdmin(admin.ModelAdmin):
formfield_overrides = {
CharField: {'widget': forms.TextInput(attrs={'size': '10'})}
}
ma = BandAdmin(Band, admin.site)
f1 = ma.formfield_for_dbfield(Band._meta.get_field('name'), request=None)
f2 = ma.formfield_for_dbfield(Band._meta.get_field('style'), request=None)
self.assertNotEqual(f1.widget, f2.widget)
self.assertEqual(f1.widget.attrs['maxlength'], '100')
self.assertEqual(f2.widget.attrs['maxlength'], '20')
self.assertEqual(f2.widget.attrs['size'], '10')
def test_formfield_overrides_m2m_filter_widget(self):
"""
The autocomplete_fields, raw_id_fields, filter_vertical, and
filter_horizontal widgets for ManyToManyFields may be overridden by
specifying a widget in formfield_overrides.
"""
class BandAdmin(admin.ModelAdmin):
filter_vertical = ['members']
formfield_overrides = {
ManyToManyField: {'widget': forms.CheckboxSelectMultiple},
}
ma = BandAdmin(Band, admin.site)
field = ma.formfield_for_dbfield(Band._meta.get_field('members'), request=None)
self.assertIsInstance(field.widget.widget, forms.CheckboxSelectMultiple)
def test_formfield_overrides_for_datetime_field(self):
"""
        Overriding the widget for DateTimeField doesn't override the default
form_class for that field (#26449).
"""
class MemberAdmin(admin.ModelAdmin):
formfield_overrides = {DateTimeField: {'widget': widgets.AdminSplitDateTime}}
ma = MemberAdmin(Member, admin.site)
f1 = ma.formfield_for_dbfield(Member._meta.get_field('birthdate'), request=None)
self.assertIsInstance(f1.widget, widgets.AdminSplitDateTime)
self.assertIsInstance(f1, forms.SplitDateTimeField)
def test_formfield_overrides_for_custom_field(self):
"""
formfield_overrides works for a custom field class.
"""
class AlbumAdmin(admin.ModelAdmin):
formfield_overrides = {MyFileField: {'widget': forms.TextInput()}}
ma = AlbumAdmin(Member, admin.site)
f1 = ma.formfield_for_dbfield(Album._meta.get_field('backside_art'), request=None)
self.assertIsInstance(f1.widget, forms.TextInput)
def test_field_with_choices(self):
self.assertFormfield(Member, 'gender', forms.Select)
def test_choices_with_radio_fields(self):
self.assertFormfield(Member, 'gender', widgets.AdminRadioSelect,
radio_fields={'gender': admin.VERTICAL})
def test_inheritance(self):
self.assertFormfield(Album, 'backside_art', widgets.AdminFileWidget)
def test_m2m_widgets(self):
"""m2m fields help text as it applies to admin app (#9321)."""
class AdvisorAdmin(admin.ModelAdmin):
filter_vertical = ['companies']
self.assertFormfield(Advisor, 'companies', widgets.FilteredSelectMultiple,
filter_vertical=['companies'])
ma = AdvisorAdmin(Advisor, admin.site)
f = ma.formfield_for_dbfield(Advisor._meta.get_field('companies'), request=None)
self.assertEqual(
f.help_text,
'Hold down “Control”, or “Command” on a Mac, to select more than one.'
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, TestCase):
def test_filter_choices_by_request_user(self):
"""
Ensure the user can only see their own cars in the foreign key dropdown.
"""
self.client.force_login(self.superuser)
response = self.client.get(reverse('admin:admin_widgets_cartire_add'))
self.assertNotContains(response, "BMW M3")
self.assertContains(response, "Volkswagen Passat")
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyWidgetChangeList(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_changelist_ForeignKey(self):
response = self.client.get(reverse('admin:admin_widgets_car_changelist'))
self.assertContains(response, '/auth/user/add/')
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyRawIdWidget(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_nonexistent_target_id(self):
band = Band.objects.create(name='Bogey Blues')
pk = band.pk
band.delete()
post_data = {
"main_band": str(pk),
}
# Try posting with a nonexistent pk in a raw id field: this
# should result in an error message, not a server exception.
response = self.client.post(reverse('admin:admin_widgets_event_add'), post_data)
self.assertContains(response, 'Select a valid choice. That choice is not one of the available choices.')
def test_invalid_target_id(self):
for test_str in ('Iñtërnâtiônàlizætiøn', "1234'", -1234):
# This should result in an error message, not a server exception.
response = self.client.post(reverse('admin:admin_widgets_event_add'), {"main_band": test_str})
self.assertContains(response, 'Select a valid choice. That choice is not one of the available choices.')
def test_url_params_from_lookup_dict_any_iterable(self):
lookup1 = widgets.url_params_from_lookup_dict({'color__in': ('red', 'blue')})
lookup2 = widgets.url_params_from_lookup_dict({'color__in': ['red', 'blue']})
self.assertEqual(lookup1, {'color__in': 'red,blue'})
self.assertEqual(lookup1, lookup2)
def test_url_params_from_lookup_dict_callable(self):
def my_callable():
return 'works'
lookup1 = widgets.url_params_from_lookup_dict({'myfield': my_callable})
lookup2 = widgets.url_params_from_lookup_dict({'myfield': my_callable()})
self.assertEqual(lookup1, lookup2)
def test_label_and_url_for_value_invalid_uuid(self):
field = Bee._meta.get_field('honeycomb')
self.assertIsInstance(field.target_field, UUIDField)
widget = widgets.ForeignKeyRawIdWidget(field.remote_field, admin.site)
self.assertEqual(widget.label_and_url_for_value('invalid-uuid'), ('', ''))
class FilteredSelectMultipleWidgetTest(SimpleTestCase):
def test_render(self):
# Backslash in verbose_name to ensure it is JavaScript escaped.
w = widgets.FilteredSelectMultiple('test\\', False)
self.assertHTMLEqual(
w.render('test', 'test'),
'<select multiple name="test" class="selectfilter" '
'data-field-name="test\\" data-is-stacked="0">\n</select>'
)
def test_stacked_render(self):
# Backslash in verbose_name to ensure it is JavaScript escaped.
w = widgets.FilteredSelectMultiple('test\\', True)
self.assertHTMLEqual(
w.render('test', 'test'),
'<select multiple name="test" class="selectfilterstacked" '
'data-field-name="test\\" data-is-stacked="1">\n</select>'
)
class AdminDateWidgetTest(SimpleTestCase):
def test_attrs(self):
w = widgets.AdminDateWidget()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="2007-12-01" type="text" class="vDateField" name="test" size="10">',
)
# pass attrs to widget
w = widgets.AdminDateWidget(attrs={'size': 20, 'class': 'myDateField'})
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="2007-12-01" type="text" class="myDateField" name="test" size="20">',
)
class AdminTimeWidgetTest(SimpleTestCase):
def test_attrs(self):
w = widgets.AdminTimeWidget()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="09:30:00" type="text" class="vTimeField" name="test" size="8">',
)
# pass attrs to widget
w = widgets.AdminTimeWidget(attrs={'size': 20, 'class': 'myTimeField'})
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="09:30:00" type="text" class="myTimeField" name="test" size="20">',
)
class AdminSplitDateTimeWidgetTest(SimpleTestCase):
def test_render(self):
w = widgets.AdminSplitDateTime()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<p class="datetime">'
'Date: <input value="2007-12-01" type="text" class="vDateField" '
'name="test_0" size="10"><br>'
'Time: <input value="09:30:00" type="text" class="vTimeField" '
'name="test_1" size="8"></p>'
)
def test_localization(self):
w = widgets.AdminSplitDateTime()
with self.settings(USE_L10N=True), translation.override('de-at'):
w.is_localized = True
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<p class="datetime">'
'Datum: <input value="01.12.2007" type="text" '
'class="vDateField" name="test_0"size="10"><br>'
'Zeit: <input value="09:30:00" type="text" class="vTimeField" '
'name="test_1" size="8"></p>'
)
class AdminURLWidgetTest(SimpleTestCase):
def test_get_context_validates_url(self):
w = widgets.AdminURLFieldWidget()
for invalid in ['', '/not/a/full/url/', 'javascript:alert("Danger XSS!")']:
with self.subTest(url=invalid):
self.assertFalse(w.get_context('name', invalid, {})['url_valid'])
self.assertTrue(w.get_context('name', 'http://example.com', {})['url_valid'])
def test_render(self):
w = widgets.AdminURLFieldWidget()
self.assertHTMLEqual(
w.render('test', ''),
'<input class="vURLField" name="test" type="url">'
)
self.assertHTMLEqual(
w.render('test', 'http://example.com'),
'<p class="url">Currently:<a href="http://example.com">'
'http://example.com</a><br>'
'Change:<input class="vURLField" name="test" type="url" '
'value="http://example.com"></p>'
)
def test_render_idn(self):
w = widgets.AdminURLFieldWidget()
self.assertHTMLEqual(
w.render('test', 'http://example-äüö.com'),
'<p class="url">Currently: <a href="http://xn--example--7za4pnc.com">'
'http://example-äüö.com</a><br>'
'Change:<input class="vURLField" name="test" type="url" '
'value="http://example-äüö.com"></p>'
)
def test_render_quoting(self):
"""
WARNING: This test doesn't use assertHTMLEqual since it will get rid
of some escapes which are tested here!
"""
HREF_RE = re.compile('href="([^"]+)"')
VALUE_RE = re.compile('value="([^"]+)"')
TEXT_RE = re.compile('<a[^>]+>([^>]+)</a>')
w = widgets.AdminURLFieldWidget()
output = w.render('test', 'http://example.com/<sometag>some-text</sometag>')
self.assertEqual(
HREF_RE.search(output)[1],
'http://example.com/%3Csometag%3Esome-text%3C/sometag%3E',
)
self.assertEqual(
TEXT_RE.search(output)[1],
'http://example.com/<sometag>some-text</sometag>',
)
self.assertEqual(
VALUE_RE.search(output)[1],
'http://example.com/<sometag>some-text</sometag>',
)
output = w.render('test', 'http://example-äüö.com/<sometag>some-text</sometag>')
self.assertEqual(
HREF_RE.search(output)[1],
'http://xn--example--7za4pnc.com/%3Csometag%3Esome-text%3C/sometag%3E',
)
self.assertEqual(
TEXT_RE.search(output)[1],
'http://example-äüö.com/<sometag>some-text</sometag>',
)
self.assertEqual(
VALUE_RE.search(output)[1],
'http://example-äüö.com/<sometag>some-text</sometag>',
)
output = w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"')
self.assertEqual(
HREF_RE.search(output)[1],
'http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22',
)
self.assertEqual(
TEXT_RE.search(output)[1],
'http://www.example.com/%C3%A4"><script>'
'alert("XSS!")</script>"'
)
self.assertEqual(
VALUE_RE.search(output)[1],
'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"',
)
class AdminUUIDWidgetTests(SimpleTestCase):
def test_attrs(self):
w = widgets.AdminUUIDInputWidget()
self.assertHTMLEqual(
w.render('test', '550e8400-e29b-41d4-a716-446655440000'),
'<input value="550e8400-e29b-41d4-a716-446655440000" type="text" class="vUUIDField" name="test">',
)
w = widgets.AdminUUIDInputWidget(attrs={'class': 'myUUIDInput'})
self.assertHTMLEqual(
w.render('test', '550e8400-e29b-41d4-a716-446655440000'),
'<input value="550e8400-e29b-41d4-a716-446655440000" type="text" class="myUUIDInput" name="test">',
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminFileWidgetTests(TestDataMixin, TestCase):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
band = Band.objects.create(name='Linkin Park')
cls.album = band.album_set.create(
name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
)
def test_render(self):
w = widgets.AdminFileWidget()
self.assertHTMLEqual(
w.render('test', self.album.cover_art),
'<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> '
'<span class="clearable-file-input">'
'<input type="checkbox" name="test-clear" id="test-clear_id"> '
'<label for="test-clear_id">Clear</label></span><br>'
'Change: <input type="file" name="test"></p>' % {
'STORAGE_URL': default_storage.url(''),
},
)
self.assertHTMLEqual(
w.render('test', SimpleUploadedFile('test', b'content')),
'<input type="file" name="test">',
)
def test_render_required(self):
widget = widgets.AdminFileWidget()
widget.is_required = True
self.assertHTMLEqual(
widget.render('test', self.album.cover_art),
'<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a><br>'
'Change: <input type="file" name="test"></p>' % {
'STORAGE_URL': default_storage.url(''),
},
)
def test_render_disabled(self):
widget = widgets.AdminFileWidget(attrs={'disabled': True})
self.assertHTMLEqual(
widget.render('test', self.album.cover_art),
'<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> '
'<span class="clearable-file-input">'
'<input type="checkbox" name="test-clear" id="test-clear_id" disabled>'
'<label for="test-clear_id">Clear</label></span><br>'
'Change: <input type="file" name="test" disabled></p>' % {
'STORAGE_URL': default_storage.url(''),
},
)
def test_readonly_fields(self):
"""
File widgets should render as a link when they're marked "read only."
"""
self.client.force_login(self.superuser)
response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,)))
self.assertContains(
response,
'<div class="readonly"><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">'
r'albums\hybrid_theory.jpg</a></div>' % {'STORAGE_URL': default_storage.url('')},
html=True,
)
self.assertNotContains(
response,
'<input type="file" name="cover_art" id="id_cover_art">',
html=True,
)
response = self.client.get(reverse('admin:admin_widgets_album_add'))
self.assertContains(
response,
'<div class="readonly"></div>',
html=True,
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ForeignKeyRawIdWidgetTest(TestCase):
def test_render(self):
band = Band.objects.create(name='Linkin Park')
band.album_set.create(
name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
)
rel = Album._meta.get_field('band').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', band.uuid, attrs={}),
'<input type="text" name="test" value="%(banduuid)s" '
'class="vForeignKeyRawIdAdminField">'
'<a href="/admin_widgets/band/?_to_field=uuid" class="related-lookup" '
'id="lookup_id_test" title="Lookup"></a> <strong>'
'<a href="/admin_widgets/band/%(bandpk)s/change/">Linkin Park</a>'
'</strong>' % {'banduuid': band.uuid, 'bandpk': band.pk}
)
def test_relations_to_non_primary_key(self):
# ForeignKeyRawIdWidget works with fields which aren't related to
# the model's primary key.
apple = Inventory.objects.create(barcode=86, name='Apple')
Inventory.objects.create(barcode=22, name='Pear')
core = Inventory.objects.create(
barcode=87, name='Core', parent=apple
)
rel = Inventory._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', core.parent_id, attrs={}),
'<input type="text" name="test" value="86" '
'class="vForeignKeyRawIdAdminField">'
'<a href="/admin_widgets/inventory/?_to_field=barcode" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
' <strong><a href="/admin_widgets/inventory/%(pk)s/change/">'
'Apple</a></strong>' % {'pk': apple.pk}
)
def test_fk_related_model_not_in_admin(self):
# FK to a model not registered with admin site. Raw ID widget should
# have no magnifying glass link. See #16542
big_honeycomb = Honeycomb.objects.create(location='Old tree')
big_honeycomb.bee_set.create()
rel = Bee._meta.get_field('honeycomb').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('honeycomb_widget', big_honeycomb.pk, attrs={}),
'<input type="text" name="honeycomb_widget" value="%(hcombpk)s">'
' <strong>%(hcomb)s</strong>'
% {'hcombpk': big_honeycomb.pk, 'hcomb': big_honeycomb}
)
def test_fk_to_self_model_not_in_admin(self):
# FK to self, not registered with admin site. Raw ID widget should have
# no magnifying glass link. See #16542
subject1 = Individual.objects.create(name='Subject #1')
Individual.objects.create(name='Child', parent=subject1)
rel = Individual._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('individual_widget', subject1.pk, attrs={}),
'<input type="text" name="individual_widget" value="%(subj1pk)s">'
' <strong>%(subj1)s</strong>'
% {'subj1pk': subject1.pk, 'subj1': subject1}
)
def test_proper_manager_for_label_lookup(self):
# see #9258
rel = Inventory._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
hidden = Inventory.objects.create(
barcode=93, name='Hidden', hidden=True
)
child_of_hidden = Inventory.objects.create(
barcode=94, name='Child of hidden', parent=hidden
)
self.assertHTMLEqual(
w.render('test', child_of_hidden.parent_id, attrs={}),
'<input type="text" name="test" value="93" class="vForeignKeyRawIdAdminField">'
'<a href="/admin_widgets/inventory/?_to_field=barcode" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
' <strong><a href="/admin_widgets/inventory/%(pk)s/change/">'
'Hidden</a></strong>' % {'pk': hidden.pk}
)
def test_render_unsafe_limit_choices_to(self):
rel = UnsafeLimitChoicesTo._meta.get_field('band').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', None),
'<input type="text" name="test" class="vForeignKeyRawIdAdminField">\n'
'<a href="/admin_widgets/band/?name=%22%26%3E%3Cescapeme&_to_field=artist_ptr" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
)
def test_render_fk_as_pk_model(self):
rel = VideoStream._meta.get_field('release_event').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', None),
'<input type="text" name="test" class="vForeignKeyRawIdAdminField">\n'
'<a href="/admin_widgets/releaseevent/?_to_field=album" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ManyToManyRawIdWidgetTest(TestCase):
def test_render(self):
band = Band.objects.create(name='Linkin Park')
m1 = Member.objects.create(name='Chester')
m2 = Member.objects.create(name='Mike')
band.members.add(m1, m2)
rel = Band._meta.get_field('members').remote_field
w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', [m1.pk, m2.pk], attrs={}), (
'<input type="text" name="test" value="%(m1pk)s,%(m2pk)s" class="vManyToManyRawIdAdminField">'
'<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
) % {'m1pk': m1.pk, 'm2pk': m2.pk}
)
self.assertHTMLEqual(
w.render('test', [m1.pk]), (
'<input type="text" name="test" value="%(m1pk)s" class="vManyToManyRawIdAdminField">'
'<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
) % {'m1pk': m1.pk}
)
def test_m2m_related_model_not_in_admin(self):
# M2M relationship with model not registered with admin site. Raw ID
# widget should have no magnifying glass link. See #16542
consultor1 = Advisor.objects.create(name='Rockstar Techie')
c1 = Company.objects.create(name='Doodle')
c2 = Company.objects.create(name='Pear')
consultor1.companies.add(c1, c2)
rel = Advisor._meta.get_field('companies').remote_field
w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('company_widget1', [c1.pk, c2.pk], attrs={}),
'<input type="text" name="company_widget1" value="%(c1pk)s,%(c2pk)s">' % {'c1pk': c1.pk, 'c2pk': c2.pk}
)
self.assertHTMLEqual(
w.render('company_widget2', [c1.pk]),
'<input type="text" name="company_widget2" value="%(c1pk)s">' % {'c1pk': c1.pk}
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class RelatedFieldWidgetWrapperTests(SimpleTestCase):
def test_no_can_add_related(self):
rel = Individual._meta.get_field('parent').remote_field
w = widgets.AdminRadioSelect()
# Used to fail with a name error.
w = widgets.RelatedFieldWidgetWrapper(w, rel, widget_admin_site)
self.assertFalse(w.can_add_related)
def test_select_multiple_widget_cant_change_delete_related(self):
rel = Individual._meta.get_field('parent').remote_field
widget = forms.SelectMultiple()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
self.assertTrue(wrapper.can_add_related)
self.assertFalse(wrapper.can_change_related)
self.assertFalse(wrapper.can_delete_related)
def test_on_delete_cascade_rel_cant_delete_related(self):
rel = Individual._meta.get_field('soulmate').remote_field
widget = forms.Select()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
self.assertTrue(wrapper.can_add_related)
self.assertTrue(wrapper.can_change_related)
self.assertFalse(wrapper.can_delete_related)
def test_custom_widget_render(self):
class CustomWidget(forms.Select):
def render(self, *args, **kwargs):
return 'custom render output'
rel = Album._meta.get_field('band').remote_field
widget = CustomWidget()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
output = wrapper.render('name', 'value')
self.assertIn('custom render output', output)
def test_widget_delegates_value_omitted_from_data(self):
class CustomWidget(forms.Select):
def value_omitted_from_data(self, data, files, name):
return False
rel = Album._meta.get_field('band').remote_field
widget = CustomWidget()
wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site)
self.assertIs(wrapper.value_omitted_from_data({}, {}, 'band'), False)
def test_widget_is_hidden(self):
rel = Album._meta.get_field('band').remote_field
widget = forms.HiddenInput()
widget.choices = ()
wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site)
self.assertIs(wrapper.is_hidden, True)
context = wrapper.get_context('band', None, {})
self.assertIs(context['is_hidden'], True)
output = wrapper.render('name', 'value')
# Related item links are hidden.
self.assertNotIn('<a ', output)
def test_widget_is_not_hidden(self):
rel = Album._meta.get_field('band').remote_field
widget = forms.Select()
wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site)
self.assertIs(wrapper.is_hidden, False)
context = wrapper.get_context('band', None, {})
self.assertIs(context['is_hidden'], False)
output = wrapper.render('name', 'value')
# Related item links are present.
self.assertIn('<a ', output)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminWidgetSeleniumTestCase(AdminSeleniumTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumTestCase.available_apps
def setUp(self):
self.u1 = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
class DateTimePickerSeleniumTests(AdminWidgetSeleniumTestCase):
def test_show_hide_date_time_picker_widgets(self):
"""
Pressing the ESC key or clicking on a widget value closes the date and
time picker widgets.
"""
from selenium.webdriver.common.keys import Keys
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# First, with the date picker widget ---------------------------------
cal_icon = self.selenium.find_element_by_id('calendarlink0')
# The date picker is hidden
self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed())
# Click the calendar icon
cal_icon.click()
# The date picker is visible
self.assertTrue(self.selenium.find_element_by_id('calendarbox0').is_displayed())
# Press the ESC key
self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
# The date picker is hidden again
self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed())
# Click the calendar icon, then on the 15th of current month
cal_icon.click()
self.selenium.find_element_by_xpath("//a[contains(text(), '15')]").click()
self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed())
self.assertEqual(
self.selenium.find_element_by_id('id_birthdate_0').get_attribute('value'),
datetime.today().strftime('%Y-%m-') + '15',
)
# Then, with the time picker widget ----------------------------------
time_icon = self.selenium.find_element_by_id('clocklink0')
# The time picker is hidden
self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed())
# Click the time icon
time_icon.click()
# The time picker is visible
self.assertTrue(self.selenium.find_element_by_id('clockbox0').is_displayed())
self.assertEqual(
[
x.text for x in
self.selenium.find_elements_by_xpath("//ul[@class='timelist']/li/a")
],
['Now', 'Midnight', '6 a.m.', 'Noon', '6 p.m.']
)
# Press the ESC key
self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
# The time picker is hidden again
self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed())
# Click the time icon, then select the 'Noon' value
time_icon.click()
self.selenium.find_element_by_xpath("//a[contains(text(), 'Noon')]").click()
self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed())
self.assertEqual(
self.selenium.find_element_by_id('id_birthdate_1').get_attribute('value'),
'12:00:00',
)
def test_calendar_nonday_class(self):
"""
Ensure cells that are not days of the month have the `nonday` CSS class.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# fill in the birth date.
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# make sure the first and last 6 cells have class nonday
for td in tds[:6] + tds[-6:]:
self.assertEqual(td.get_attribute('class'), 'nonday')
def test_calendar_selected_class(self):
"""
        Ensure the cell for the day in the input has the `selected` CSS class.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# fill in the birth date.
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# verify the selected cell
selected = tds[6]
self.assertEqual(selected.get_attribute('class'), 'selected')
self.assertEqual(selected.text, '1')
def test_calendar_no_selected_class(self):
"""
Ensure no cells are given the selected class when the field is empty.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# verify there are no cells with the selected class
selected = [td for td in tds if td.get_attribute('class') == 'selected']
self.assertEqual(len(selected), 0)
def test_calendar_show_date_from_input(self):
"""
The calendar shows the date from the input field for every locale
supported by Django.
"""
self.selenium.set_window_size(1024, 768)
self.admin_login(username='super', password='secret', login_url='/')
# Enter test data
member = Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M')
# Get month name translations for every locale
month_string = 'May'
path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale')
for language_code, language_name in settings.LANGUAGES:
try:
catalog = gettext.translation('djangojs', path, [language_code])
except OSError:
continue
if month_string in catalog._catalog:
month_name = catalog._catalog[month_string]
else:
month_name = month_string
# Get the expected caption
may_translation = month_name
expected_caption = '{:s} {:d}'.format(may_translation.upper(), 1984)
# Test with every locale
with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True):
# Open a page that has a date picker widget
url = reverse('admin:admin_widgets_member_change', args=(member.pk,))
self.selenium.get(self.live_server_url + url)
# Click on the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# Make sure that the right month and year are displayed
self.wait_for_text('#calendarin0 caption', expected_caption)
@override_settings(TIME_ZONE='Asia/Singapore')
class DateTimePickerShortcutsSeleniumTests(AdminWidgetSeleniumTestCase):
def test_date_time_picker_shortcuts(self):
"""
date/time/datetime picker shortcuts work in the current time zone.
Refs #20663.
        This test case is fairly tricky: it relies on selenium still running the browser
in the default time zone "America/Chicago" despite `override_settings` changing
the time zone to "Asia/Singapore".
"""
self.admin_login(username='super', password='secret', login_url='/')
error_margin = timedelta(seconds=10)
# If we are neighbouring a DST, we add an hour of error margin.
tz = pytz.timezone('America/Chicago')
utc_now = datetime.now(pytz.utc)
tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname()
tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname()
if tz_yesterday != tz_tomorrow:
error_margin += timedelta(hours=1)
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
self.selenium.find_element_by_id('id_name').send_keys('test')
# Click on the "today" and "now" shortcuts.
shortcuts = self.selenium.find_elements_by_css_selector('.field-birthdate .datetimeshortcuts')
now = datetime.now()
for shortcut in shortcuts:
shortcut.find_element_by_tag_name('a').click()
# There is a time zone mismatch warning.
# Warning: This would effectively fail if the TIME_ZONE defined in the
# settings has the same UTC offset as "Asia/Singapore" because the
# mismatch warning would be rightfully missing from the page.
self.selenium.find_elements_by_css_selector('.field-birthdate .timezonewarning')
# Submit the form.
with self.wait_page_loaded():
self.selenium.find_element_by_name('_save').click()
# Make sure that "now" in javascript is within 10 seconds
# from "now" on the server side.
member = Member.objects.get(name='test')
self.assertGreater(member.birthdate, now - error_margin)
self.assertLess(member.birthdate, now + error_margin)
# The above tests run with Asia/Singapore which are on the positive side of
# UTC. Here we test with a timezone on the negative side.
@override_settings(TIME_ZONE='US/Eastern')
class DateTimePickerAltTimezoneSeleniumTests(DateTimePickerShortcutsSeleniumTests):
pass
class HorizontalVerticalFilterSeleniumTests(AdminWidgetSeleniumTestCase):
def setUp(self):
super().setUp()
self.lisa = Student.objects.create(name='Lisa')
self.john = Student.objects.create(name='John')
self.bob = Student.objects.create(name='Bob')
self.peter = Student.objects.create(name='Peter')
self.jenny = Student.objects.create(name='Jenny')
self.jason = Student.objects.create(name='Jason')
self.cliff = Student.objects.create(name='Cliff')
self.arthur = Student.objects.create(name='Arthur')
self.school = School.objects.create(name='School of Awesome')
def assertActiveButtons(self, mode, field_name, choose, remove, choose_all=None, remove_all=None):
choose_link = '#id_%s_add_link' % field_name
choose_all_link = '#id_%s_add_all_link' % field_name
remove_link = '#id_%s_remove_link' % field_name
remove_all_link = '#id_%s_remove_all_link' % field_name
self.assertEqual(self.has_css_class(choose_link, 'active'), choose)
self.assertEqual(self.has_css_class(remove_link, 'active'), remove)
if mode == 'horizontal':
self.assertEqual(self.has_css_class(choose_all_link, 'active'), choose_all)
self.assertEqual(self.has_css_class(remove_all_link, 'active'), remove_all)
def execute_basic_operations(self, mode, field_name):
original_url = self.selenium.current_url
from_box = '#id_%s_from' % field_name
to_box = '#id_%s_to' % field_name
choose_link = 'id_%s_add_link' % field_name
choose_all_link = 'id_%s_add_all_link' % field_name
remove_link = 'id_%s_remove_link' % field_name
remove_all_link = 'id_%s_remove_all_link' % field_name
# Initial positions ---------------------------------------------------
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
self.assertSelectOptions(to_box, [str(self.lisa.id), str(self.peter.id)])
self.assertActiveButtons(mode, field_name, False, False, True, True)
# Click 'Choose all' --------------------------------------------------
if mode == 'horizontal':
self.selenium.find_element_by_id(choose_all_link).click()
elif mode == 'vertical':
            # There's no 'Choose all' button in vertical mode, so individually
# select all options and click 'Choose'.
for option in self.selenium.find_elements_by_css_selector(from_box + ' > option'):
option.click()
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box, [])
self.assertSelectOptions(to_box, [
str(self.lisa.id), str(self.peter.id),
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
self.assertActiveButtons(mode, field_name, False, False, False, True)
# Click 'Remove all' --------------------------------------------------
if mode == 'horizontal':
self.selenium.find_element_by_id(remove_all_link).click()
elif mode == 'vertical':
            # There's no 'Remove all' button in vertical mode, so individually
# select all options and click 'Remove'.
for option in self.selenium.find_elements_by_css_selector(to_box + ' > option'):
option.click()
self.selenium.find_element_by_id(remove_link).click()
self.assertSelectOptions(from_box, [
str(self.lisa.id), str(self.peter.id),
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
self.assertSelectOptions(to_box, [])
self.assertActiveButtons(mode, field_name, False, False, True, False)
# Choose some options ------------------------------------------------
from_lisa_select_option = self.selenium.find_element_by_css_selector(
'{} > option[value="{}"]'.format(from_box, self.lisa.id)
)
# Check the title attribute is there for tool tips: ticket #20821
self.assertEqual(from_lisa_select_option.get_attribute('title'), from_lisa_select_option.get_attribute('text'))
self.select_option(from_box, str(self.lisa.id))
self.select_option(from_box, str(self.jason.id))
self.select_option(from_box, str(self.bob.id))
self.select_option(from_box, str(self.john.id))
self.assertActiveButtons(mode, field_name, True, False, True, False)
self.selenium.find_element_by_id(choose_link).click()
self.assertActiveButtons(mode, field_name, False, False, True, True)
self.assertSelectOptions(from_box, [
str(self.peter.id), str(self.arthur.id),
str(self.cliff.id), str(self.jenny.id),
])
self.assertSelectOptions(to_box, [
str(self.lisa.id), str(self.bob.id),
str(self.jason.id), str(self.john.id),
])
# Check the tooltip is still there after moving: ticket #20821
to_lisa_select_option = self.selenium.find_element_by_css_selector(
'{} > option[value="{}"]'.format(to_box, self.lisa.id)
)
self.assertEqual(to_lisa_select_option.get_attribute('title'), to_lisa_select_option.get_attribute('text'))
# Remove some options -------------------------------------------------
self.select_option(to_box, str(self.lisa.id))
self.select_option(to_box, str(self.bob.id))
self.assertActiveButtons(mode, field_name, False, True, True, True)
self.selenium.find_element_by_id(remove_link).click()
self.assertActiveButtons(mode, field_name, False, False, True, True)
self.assertSelectOptions(from_box, [
str(self.peter.id), str(self.arthur.id),
str(self.cliff.id), str(self.jenny.id),
str(self.lisa.id), str(self.bob.id)
])
self.assertSelectOptions(to_box, [str(self.jason.id), str(self.john.id)])
# Choose some more options --------------------------------------------
self.select_option(from_box, str(self.arthur.id))
self.select_option(from_box, str(self.cliff.id))
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box, [
str(self.peter.id), str(self.jenny.id),
str(self.lisa.id), str(self.bob.id),
])
self.assertSelectOptions(to_box, [
str(self.jason.id), str(self.john.id),
str(self.arthur.id), str(self.cliff.id),
])
# Choose some more options --------------------------------------------
self.select_option(from_box, str(self.peter.id))
self.select_option(from_box, str(self.lisa.id))
# Confirm they're selected after clicking inactive buttons: ticket #26575
self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)])
self.selenium.find_element_by_id(remove_link).click()
self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)])
# Unselect the options ------------------------------------------------
self.deselect_option(from_box, str(self.peter.id))
self.deselect_option(from_box, str(self.lisa.id))
# Choose some more options --------------------------------------------
self.select_option(to_box, str(self.jason.id))
self.select_option(to_box, str(self.john.id))
# Confirm they're selected after clicking inactive buttons: ticket #26575
self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)])
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)])
# Unselect the options ------------------------------------------------
self.deselect_option(to_box, str(self.jason.id))
self.deselect_option(to_box, str(self.john.id))
# Pressing buttons shouldn't change the URL.
self.assertEqual(self.selenium.current_url, original_url)
def test_basic(self):
self.selenium.set_window_size(1024, 768)
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,)))
self.wait_page_ready()
self.execute_basic_operations('vertical', 'students')
self.execute_basic_operations('horizontal', 'alumni')
# Save and check that everything is properly stored in the database ---
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_ready()
self.school = School.objects.get(id=self.school.id) # Reload from database
self.assertEqual(list(self.school.students.all()), [self.arthur, self.cliff, self.jason, self.john])
self.assertEqual(list(self.school.alumni.all()), [self.arthur, self.cliff, self.jason, self.john])
def test_filter(self):
"""
Typing in the search box filters out options displayed in the 'from'
box.
"""
from selenium.webdriver.common.keys import Keys
self.selenium.set_window_size(1024, 768)
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,)))
for field_name in ['students', 'alumni']:
from_box = '#id_%s_from' % field_name
to_box = '#id_%s_to' % field_name
choose_link = 'id_%s_add_link' % field_name
remove_link = 'id_%s_remove_link' % field_name
input = self.selenium.find_element_by_id('id_%s_input' % field_name)
# Initial values
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
# Typing in some characters filters out non-matching options
input.send_keys('a')
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
input.send_keys('R')
self.assertSelectOptions(from_box, [str(self.arthur.id)])
# Clearing the text box makes the other options reappear
input.send_keys([Keys.BACK_SPACE])
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE])
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
# -----------------------------------------------------------------
# Choosing a filtered option sends it properly to the 'to' box.
input.send_keys('a')
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
self.select_option(from_box, str(self.jason.id))
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box, [str(self.arthur.id)])
self.assertSelectOptions(to_box, [
str(self.lisa.id), str(self.peter.id), str(self.jason.id),
])
self.select_option(to_box, str(self.lisa.id))
self.selenium.find_element_by_id(remove_link).click()
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.lisa.id)])
self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE]) # Clear text box
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jenny.id),
str(self.john.id), str(self.lisa.id),
])
self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
# -----------------------------------------------------------------
# Pressing enter on a filtered option sends it properly to
# the 'to' box.
self.select_option(to_box, str(self.jason.id))
self.selenium.find_element_by_id(remove_link).click()
input.send_keys('ja')
self.assertSelectOptions(from_box, [str(self.jason.id)])
input.send_keys([Keys.ENTER])
self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE])
# Save and check that everything is properly stored in the database ---
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.school = School.objects.get(id=self.school.id) # Reload from database
self.assertEqual(list(self.school.students.all()), [self.jason, self.peter])
self.assertEqual(list(self.school.alumni.all()), [self.jason, self.peter])
def test_back_button_bug(self):
"""
Some browsers had a bug where navigating away from the change page
and then clicking the browser's back button would clear the
filter_horizontal/filter_vertical widgets (#13614).
"""
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,))
self.selenium.get(self.live_server_url + change_url)
# Navigate away and go back to the change form page.
self.selenium.find_element_by_link_text('Home').click()
self.selenium.back()
expected_unselected_values = [
str(self.arthur.id), str(self.bob.id), str(self.cliff.id),
str(self.jason.id), str(self.jenny.id), str(self.john.id),
]
expected_selected_values = [str(self.lisa.id), str(self.peter.id)]
# Everything is still in place
self.assertSelectOptions('#id_students_from', expected_unselected_values)
self.assertSelectOptions('#id_students_to', expected_selected_values)
self.assertSelectOptions('#id_alumni_from', expected_unselected_values)
self.assertSelectOptions('#id_alumni_to', expected_selected_values)
def test_refresh_page(self):
"""
Horizontal and vertical filter widgets keep selected options on page
reload (#22955).
"""
self.school.students.add(self.arthur, self.jason)
self.school.alumni.add(self.arthur, self.jason)
self.admin_login(username='super', password='secret', login_url='/')
change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,))
self.selenium.get(self.live_server_url + change_url)
options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option'))
self.assertEqual(options_len, 2)
        # self.selenium.refresh() or send_keys(Keys.F5) does a hard reload and
# doesn't replicate what happens when a user clicks the browser's
# 'Refresh' button.
with self.wait_page_loaded():
self.selenium.execute_script("location.reload()")
options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option'))
self.assertEqual(options_len, 2)
class AdminRawIdWidgetSeleniumTests(AdminWidgetSeleniumTestCase):
def setUp(self):
super().setUp()
Band.objects.create(id=42, name='Bogey Blues')
Band.objects.create(id=98, name='Green Potatoes')
def test_ForeignKey(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_event_add'))
main_window = self.selenium.current_window_handle
# No value has been selected yet
self.assertEqual(self.selenium.find_element_by_id('id_main_band').get_attribute('value'), '')
# Open the popup window and click on a band
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
# The field now contains the selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the other selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '98')
def test_many_to_many(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_event_add'))
main_window = self.selenium.current_window_handle
# No value has been selected yet
self.assertEqual(self.selenium.find_element_by_id('id_supporting_bands').get_attribute('value'), '')
# Help text for the field is displayed
self.assertEqual(
self.selenium.find_element_by_css_selector('.field-supporting_bands div.help').text,
'Supporting Bands.'
)
# Open the popup window and click on a band
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
# The field now contains the selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the two selected bands' ids
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42,98')
class RelatedFieldWidgetSeleniumTests(AdminWidgetSeleniumTestCase):
def test_ForeignKey_using_to_field(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_profile_add'))
main_window = self.selenium.current_window_handle
        # Click the Add User button to add a new user
self.selenium.find_element_by_id('add_id_user').click()
self.wait_for_and_switch_to_popup()
password_field = self.selenium.find_element_by_id('id_password')
password_field.send_keys('password')
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'newuser'
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.selenium.switch_to.window(main_window)
# The field now contains the new user
self.selenium.find_element_by_css_selector('#id_user option[value=newuser]')
# Click the Change User button to change it
self.selenium.find_element_by_id('change_id_user').click()
self.wait_for_and_switch_to_popup()
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'changednewuser'
username_field.clear()
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.selenium.switch_to.window(main_window)
self.selenium.find_element_by_css_selector('#id_user option[value=changednewuser]')
# Go ahead and submit the form to make sure it works
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.wait_for_text('li.success', 'The profile “changednewuser” was added successfully.')
profiles = Profile.objects.all()
self.assertEqual(len(profiles), 1)
self.assertEqual(profiles[0].user.username, username_value)
| [
[
[
7,
14
],
[
39487,
39494
]
],
[
[
22,
24
],
[
39292,
39294
],
[
39305,
39307
]
],
[
[
32,
34
],
[
15921,
15923
],
[
15969,
15971
],
[
16017,
16019
]
],
[
[
56,
64
],
[
12213,
12221
],
[
12519,
12527
],
[
12824,
12832
],
[
13127,
13135
],
[
13443,
13451
],
[
14009,
14017
],
[
34597,
34605
],
[
39157,
39165
],
[
41292,
41300
],
[
41890,
41898
]
],
[
[
66,
75
],
[
41133,
41142
],
[
41349,
41358
],
[
41425,
41434
],
[
41536,
41545
]
],
[
[
98,
111
],
[
39321,
39334
]
],
[
[
120,
124
],
[
41241,
41245
],
[
41305,
41309
]
],
[
[
145,
150
],
[
3542,
3547
],
[
4058,
4063
],
[
4524,
4529
],
[
4613,
4618
],
[
4937,
4942
],
[
5870,
5875
],
[
6095,
6100
],
[
6703,
6708
],
[
6974,
6979
],
[
7170,
7175
],
[
7274,
7279
],
[
29654,
29659
],
[
30193,
30198
],
[
30646,
30651
],
[
31250,
31255
],
[
31723,
31728
],
[
32254,
32259
]
],
[
[
175,
183
],
[
39424,
39432
]
],
[
[
211,
216
],
[
1834,
1839
],
[
2053,
2058
],
[
3919,
3924
],
[
4844,
4849
],
[
5019,
5024
],
[
5729,
5734
],
[
5944,
5949
],
[
6357,
6362
],
[
6499,
6504
],
[
6896,
6901
],
[
7026,
7031
],
[
7461,
7466
],
[
7719,
7724
],
[
7961,
7966
],
[
11131,
11136
]
],
[
[
250,
257
],
[
2234,
2241
],
[
2569,
2576
],
[
2679,
2686
],
[
2788,
2795
],
[
2895,
2902
],
[
2998,
3005
],
[
3108,
3115
],
[
3217,
3224
],
[
3325,
3332
],
[
3436,
3443
],
[
3644,
3651
],
[
3832,
3839
],
[
4167,
4174
],
[
4343,
4350
],
[
6437,
6444
],
[
6641,
6648
],
[
7382,
7389
],
[
7563,
7570
],
[
7834,
7841
],
[
10321,
10328
],
[
10407,
10414
],
[
10709,
10716
],
[
10789,
10796
],
[
11081,
11088
],
[
11395,
11402
],
[
11777,
11784
],
[
12128,
12135
],
[
12392,
12399
],
[
12739,
12746
],
[
13000,
13007
],
[
13355,
13362
],
[
13804,
13811
],
[
14450,
14457
],
[
14816,
14823
],
[
15345,
15352
],
[
16063,
16070
],
[
17928,
17935
],
[
18192,
18199
],
[
18882,
18889
],
[
19680,
19687
],
[
20171,
20178
],
[
22086,
22093
],
[
23108,
23115
],
[
24004,
24011
],
[
24745,
24752
],
[
25237,
25244
],
[
26134,
26141
],
[
26648,
26655
],
[
27395,
27402
],
[
28631,
28638
],
[
29316,
29323
],
[
29397,
29404
],
[
29695,
29702
],
[
30226,
30233
],
[
30861,
30868
],
[
31467,
31474
],
[
31789,
31796
],
[
32287,
32294
]
],
[
[
297,
318
],
[
32726,
32747
],
[
32792,
32813
]
],
[
[
358,
362
],
[
1063,
1067
],
[
1159,
1163
],
[
32869,
32873
]
],
[
[
401,
416
],
[
19419,
19434
],
[
20067,
20082
],
[
20754,
20769
],
[
21300,
21315
]
],
[
[
460,
478
],
[
19528,
19546
]
],
[
[
514,
523
],
[
4915,
4924
]
],
[
[
525,
534
],
[
4591,
4600
]
],
[
[
536,
549
],
[
6411,
6424
]
],
[
[
551,
566
],
[
5842,
5857
]
],
[
[
568,
577
],
[
11053,
11062
]
],
[
[
605,
619
],
[
1409,
1423
],
[
11267,
11281
],
[
12073,
12087
],
[
12684,
12698
],
[
13299,
13313
],
[
14375,
14389
],
[
17873,
17887
],
[
29184,
29198
]
],
[
[
621,
629
],
[
8324,
8332
],
[
8838,
8846
],
[
9205,
9213
],
[
18573,
18581
],
[
21809,
21817
],
[
27096,
27104
]
],
[
[
631,
648
],
[
8209,
8226
],
[
8732,
8749
],
[
9104,
9121
],
[
18478,
18495
],
[
21724,
21741
],
[
27011,
27028
],
[
29094,
29111
],
[
32639,
32656
],
[
40524,
40541
],
[
42913,
42930
],
[
39981,
39998
]
],
[
[
673,
680
],
[
8575,
8582
],
[
8997,
9004
],
[
9666,
9673
],
[
10056,
10063
],
[
21026,
21033
],
[
21551,
21558
],
[
33462,
33469
],
[
36340,
36347
],
[
37295,
37302
],
[
38264,
38271
],
[
40127,
40134
],
[
41605,
41612
],
[
51766,
51773
],
[
52971,
52978
],
[
57255,
57262
],
[
58533,
58540
],
[
59610,
59617
],
[
60997,
61004
],
[
62717,
62724
]
],
[
[
706,
717
],
[
13877,
13888
]
],
[
[
745,
752
],
[
7812,
7819
],
[
7952,
7959
],
[
8010,
8017
],
[
28365,
28372
],
[
28568,
28575
]
],
[
[
754,
759
],
[
3416,
3421
],
[
7076,
7081
],
[
7540,
7545
],
[
22030,
22035
],
[
30768,
30773
],
[
31374,
31379
],
[
31663,
31668
],
[
32194,
32199
]
],
[
[
761,
765
],
[
4041,
4045
],
[
4150,
4154
],
[
4326,
4330
],
[
5013,
5017
],
[
5069,
5073
],
[
5151,
5155
],
[
5938,
5942
],
[
5997,
6001
],
[
9344,
9348
],
[
18677,
18681
],
[
21863,
21867
],
[
27150,
27154
],
[
27337,
27341
],
[
59347,
59351
],
[
59402,
59406
]
],
[
[
767,
770
],
[
10970,
10973
],
[
23945,
23948
]
],
[
[
772,
775
],
[
1231,
1234
],
[
1314,
1317
]
],
[
[
777,
784
],
[
28426,
28433
],
[
28477,
28484
]
],
[
[
786,
791
],
[
2548,
2553
],
[
2767,
2772
],
[
2873,
2878
],
[
2983,
2988
],
[
3090,
3095
],
[
3522,
3527
],
[
3624,
3629
],
[
3812,
3817
],
[
4503,
4508
]
],
[
[
793,
802
],
[
23846,
23855
]
],
[
[
804,
814
],
[
24558,
24568
],
[
24611,
24621
],
[
24682,
24692
],
[
29254,
29264
],
[
29587,
29597
],
[
30124,
30134
]
],
[
[
820,
829
],
[
22823,
22832
],
[
22882,
22891
],
[
22947,
22956
],
[
23047,
23056
],
[
25176,
25185
],
[
25309,
25318
],
[
25422,
25431
]
],
[
[
831,
837
],
[
2658,
2664
],
[
3201,
3207
],
[
3308,
3314
],
[
6491,
6497
],
[
6549,
6555
],
[
7018,
7024
],
[
7256,
7262
],
[
7364,
7370
],
[
27204,
27210
],
[
27255,
27261
],
[
39113,
39119
],
[
42617,
42623
]
],
[
[
839,
850
],
[
6950,
6961
]
],
[
[
852,
859
],
[
64513,
64520
]
],
[
[
861,
867
],
[
43650,
43656
],
[
52204,
52210
],
[
56549,
56555
]
],
[
[
869,
876
],
[
43190,
43197
],
[
43246,
43253
],
[
43301,
43308
],
[
43357,
43364
],
[
43415,
43422
],
[
43473,
43480
],
[
43531,
43538
],
[
43590,
43597
]
],
[
[
882,
902
],
[
26064,
26084
]
],
[
[
904,
915
],
[
26578,
26589
]
],
[
[
944,
969
],
[
22121,
22138
],
[
23143,
23160
],
[
24039,
24056
],
[
24780,
24797
],
[
25272,
25289
],
[
26169,
26186
],
[
26683,
26700
],
[
27430,
27447
],
[
28666,
28683
],
[
29439,
29456
],
[
29755,
29772
],
[
30286,
30303
],
[
30921,
30938
],
[
31514,
31531
],
[
31836,
31853
],
[
32334,
32351
]
],
[
[
978,
991
],
[
8309,
8322
],
[
8823,
8836
],
[
9190,
9203
],
[
18558,
18571
]
],
[
[
1379,
1408
]
],
[
[
8268,
8308
]
],
[
[
8791,
8822
]
],
[
[
9163,
9189
]
],
[
[
11234,
11266
]
],
[
[
12053,
12072
]
],
[
[
12664,
12683
]
],
[
[
13270,
13298
]
],
[
[
14356,
14374
]
],
[
[
17852,
17872
]
],
[
[
18537,
18557
]
],
[
[
21783,
21808
]
],
[
[
27070,
27095
]
],
[
[
29153,
29183
]
],
[
[
32698,
32725
],
[
32999,
33026
],
[
40613,
40640
],
[
43094,
43121
],
[
59263,
59290
],
[
62514,
62541
]
],
[
[
32971,
32998
]
],
[
[
40576,
40612
],
[
43000,
43036
]
],
[
[
42961,
42999
]
],
[
[
43056,
43093
]
],
[
[
59233,
59262
]
],
[
[
62482,
62513
]
]
] |
# -*- coding: utf-8 -*-
# This file as well as the whole tsfresh package are licenced under the MIT licence (see the LICENCE.txt)
# Maximilian Christ (maximilianchrist.com), Blue Yonder Gmbh, 2016
"""
Contains a feature selection method that evaluates the importance of the different extracted features. To do so,
for every feature the influence on the target is evaluated by a univariate test and the p-value is calculated.
The methods that calculate the p-values are called feature selectors.
Afterwards, the Benjamini-Hochberg procedure, which is a multiple testing procedure, decides which features to keep and
which to cut off (solely based on the p-values).
"""
from __future__ import absolute_import, division, print_function
from functools import partial
from builtins import zip
from builtins import range
import os
import numpy as np
import pandas as pd
import logging
from multiprocessing import Pool
from tsfresh.feature_selection.significance_tests import target_binary_feature_real_test, \
target_real_feature_binary_test, target_real_feature_real_test, target_binary_feature_binary_test
from tsfresh import defaults
_logger = logging.getLogger(__name__)
def check_fs_sig_bh(X, y,
n_processes=defaults.N_PROCESSES,
chunksize=defaults.CHUNKSIZE,
fdr_level=defaults.FDR_LEVEL,
hypotheses_independent=defaults.HYPOTHESES_INDEPENDENT,
test_for_binary_target_real_feature=defaults.TEST_FOR_BINARY_TARGET_REAL_FEATURE):
"""
The wrapper function that calls the significance test functions in this package.
    In total, for each feature from the input pandas.DataFrame a univariate feature significance test is conducted.
    Those tests generate p-values that are then evaluated by the Benjamini-Hochberg procedure to decide which features
to keep and which to delete.
We are testing
:math:`H_0` = the Feature is not relevant and can not be added
against
:math:`H_1` = the Feature is relevant and should be kept
or in other words
:math:`H_0` = Target and Feature are independent / the Feature has no influence on the target
:math:`H_1` = Target and Feature are associated / dependent
When the target is binary this becomes
:math:`H_0 = \\left( F_{\\text{target}=1} = F_{\\text{target}=0} \\right)`
:math:`H_1 = \\left( F_{\\text{target}=1} \\neq F_{\\text{target}=0} \\right)`
Where :math:`F` is the distribution of the target.
In the same way we can state the hypothesis when the feature is binary
:math:`H_0 = \\left( T_{\\text{feature}=1} = T_{\\text{feature}=0} \\right)`
:math:`H_1 = \\left( T_{\\text{feature}=1} \\neq T_{\\text{feature}=0} \\right)`
Here :math:`T` is the distribution of the target.
TODO: And for real valued?
:param X: The DataFrame containing all the features and the target
:type X: pandas.DataFrame
:param y: The target vector
:type y: pandas.Series
:param test_for_binary_target_real_feature: Which test to be used for binary target, real feature
:type test_for_binary_target_real_feature: str
    :param fdr_level: The FDR level that should be respected; this is the theoretically expected percentage of irrelevant
features among all created features.
:type fdr_level: float
:param hypotheses_independent: Can the significance of the features be assumed to be independent?
Normally, this should be set to False as the features are never
independent (e.g. mean and median)
:type hypotheses_independent: bool
:param n_processes: Number of processes to use during the p-value calculation
:type n_processes: int
:param chunksize: Size of the chunks submitted to the worker processes
:type chunksize: int
:return: A pandas.DataFrame with each column of the input DataFrame X as index with information on the significance
of this particular feature. The DataFrame has the columns
"Feature",
"type" (binary, real or const),
"p_value" (the significance of this feature as a p-value, lower means more significant)
"rejected" (if the Benjamini Hochberg procedure rejected this feature)
:rtype: pandas.DataFrame
"""
target_is_binary = len(set(y)) == 2
    # TODO: solve the multiclass case. For a multiclass target the algorithm currently treats the target as a
    # regression target. Instead, one could perform a binary one-versus-all classification.
# Only allow entries for which the target is known!
    y = y.astype(np.float64)
    X = X.copy().loc[~np.isnan(y), :]
# Create the DataFrame df_features containing the information about the different hypotheses
# Every row contains information over one feature column from X
df_features = pd.DataFrame()
df_features['Feature'] = list(set(X.columns))
df_features = df_features.set_index('Feature', drop=False)
# Add relevant columns to df_features
df_features["rejected"] = np.nan
df_features["type"] = np.nan
df_features["p_value"] = np.nan
# Calculate the feature significance in parallel
pool = Pool(n_processes)
    # Helper function which wraps the _calculate_p_value with many arguments already set
f = partial(_calculate_p_value, y=y,
target_is_binary=target_is_binary,
test_for_binary_target_real_feature=test_for_binary_target_real_feature)
results = pool.map(f, [X[feature] for feature in df_features['Feature']], chunksize=chunksize)
p_values_of_features = pd.DataFrame(results)
df_features.update(p_values_of_features)
pool.close()
pool.join()
# Perform the real feature rejection
if "const" in set(df_features.type):
df_features_bh = benjamini_hochberg_test(df_features.loc[~(df_features.type == "const")],
hypotheses_independent, fdr_level)
df_features = pd.concat([df_features_bh, df_features.loc[df_features.type == "const"]])
else:
df_features = benjamini_hochberg_test(df_features, hypotheses_independent, fdr_level)
# It is very important that we have a boolean "rejected" column, so we do a cast here to be sure
df_features["rejected"] = df_features["rejected"].astype("bool")
if defaults.WRITE_SELECTION_REPORT:
        # Write results of the BH test to file
if not os.path.exists(defaults.RESULT_DIR):
os.mkdir(defaults.RESULT_DIR)
with open(os.path.join(defaults.RESULT_DIR, "fs_bh_results.txt"), 'w') as file_out:
file_out.write(("Performed BH Test to control the false discovery rate(FDR); \n"
"FDR-Level={0};Hypothesis independent={1}\n"
).format(fdr_level, hypotheses_independent))
df_features.to_csv(index=False, path_or_buf=file_out, sep=';', float_format='%.4f')
return df_features
def _calculate_p_value(feature_column, y, target_is_binary, test_for_binary_target_real_feature):
"""
Internal helper function to calculate the p-value of a given feature using one of the dedicated
functions target_*_feature_*_test.
:param feature_column: the feature column.
:type feature_column: pandas.Series
:param y: the binary target vector
:type y: pandas.Series
:param target_is_binary: Whether the target is binary or not
:type target_is_binary: bool
:param test_for_binary_target_real_feature: The significance test to be used for binary target and real valued
features. Either ``'mann'`` for the Mann-Whitney-U test or ``'smir'``
for the Kolmogorov-Smirnov test.
:type test_for_binary_target_real_feature: str
:return: the p-value of the feature significance test and the type of the tested feature as a Series.
Lower p-values indicate a higher feature significance.
:rtype: pd.Series
"""
# Do not process constant features
if len(pd.unique(feature_column.values)) == 1:
_logger.warning("[test_feature_significance] Feature {} is constant".format(feature_column.name))
return pd.Series({"type": "const", "rejected": False}, name=feature_column.name)
else:
if target_is_binary:
# Decide if the current feature is binary or not
if len(set(feature_column.values)) == 2:
type = "binary"
p_value = target_binary_feature_binary_test(feature_column, y)
else:
type = "real"
p_value = target_binary_feature_real_test(feature_column, y, test_for_binary_target_real_feature)
else:
# Decide if the current feature is binary or not
if len(set(feature_column.values)) == 2:
type = "binary"
p_value = target_real_feature_binary_test(feature_column, y)
else:
type = "real"
p_value = target_real_feature_real_test(feature_column, y)
return pd.Series({"p_value": p_value, "type": type}, name=feature_column.name)
def benjamini_hochberg_test(df_pvalues, hypotheses_independent, fdr_level):
"""
    This is an implementation of the Benjamini-Hochberg procedure that calculates which of the hypotheses belonging
    to the different p-values in df_pvalues to reject. While doing so, this test controls the false discovery rate,
    which is the ratio of false rejections to all rejections:
.. math::
FDR = \\mathbb{E} \\left [ \\frac{ |\\text{false rejections}| }{ |\\text{all rejections}|} \\right]
References
----------
.. [1] Benjamini, Yoav and Yekutieli, Daniel (2001).
The control of the false discovery rate in multiple testing under dependency.
Annals of statistics, 1165--1188
    :param df_pvalues: This DataFrame should contain the p-values of the different hypotheses in a column named
                       "p_value".
:type df_pvalues: pandas.DataFrame
:param hypotheses_independent: Can the significance of the features be assumed to be independent?
Normally, this should be set to False as the features are never
independent (e.g. mean and median)
:type hypotheses_independent: bool
    :param fdr_level: The FDR level that should be respected; this is the theoretically expected percentage of irrelevant
features among all created features.
:type fdr_level: float
:return: The same DataFrame as the input, but with an added boolean column "rejected".
:rtype: pandas.DataFrame
"""
# Get auxiliary variables and vectors
df_pvalues = df_pvalues.sort_values(by="p_value")
m = len(df_pvalues)
K = list(range(1, m + 1))
# Calculate the weight vector C
if hypotheses_independent:
# c(k) = 1
C = [1] * m
else:
# c(k) = \sum_{i=1}^m 1/i
C = [sum([1.0 / i for i in range(1, k + 1)]) for k in K]
# Calculate the vector T to compare to the p_value
T = [fdr_level * k / m * 1.0 / c for k, c in zip(K, C)]
# Get the last rejected p_value
try:
k_max = list(df_pvalues.p_value <= T).index(False)
except ValueError:
k_max = m
# Add the column denoting if hypothesis was rejected
df_pvalues["rejected"] = [True] * k_max + [False] * (m - k_max)
return df_pvalues
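

# Minimal usage sketch with made-up data; the names X_demo, y_demo and the column
# labels below are illustrative only. It builds a tiny feature matrix with one
# informative and one pure-noise column and keeps the features whose null
# hypothesis was rejected, i.e. the ones considered relevant.
if __name__ == "__main__":
    np.random.seed(0)
    y_demo = pd.Series(np.random.randint(0, 2, size=200).astype(float))
    X_demo = pd.DataFrame({
        "informative": y_demo + np.random.normal(scale=0.1, size=200),
        "noise": np.random.normal(size=200),
    })
    relevance = check_fs_sig_bh(X_demo, y_demo, n_processes=1)
    relevant_features = relevance.loc[relevance["rejected"], "Feature"].tolist()
    print("Selected features:", relevant_features)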
| [
[
[
692,
707
]
],
[
[
709,
717
]
],
[
[
719,
733
]
],
[
[
757,
764
],
[
5432,
5439
]
],
[
[
787,
790
],
[
11363,
11366
]
],
[
[
812,
817
],
[
11025,
11030
],
[
11228,
11233
]
],
[
[
825,
827
],
[
6577,
6579
],
[
6626,
6628
],
[
6675,
6677
]
],
[
[
835,
846
],
[
4738,
4740
],
[
4776,
4778
],
[
5174,
5176
],
[
5207,
5209
],
[
5243,
5245
]
],
[
[
854,
866
],
[
4972,
4974
],
[
5731,
5733
],
[
6119,
6121
],
[
8229,
8231
],
[
8390,
8392
],
[
9267,
9269
]
],
[
[
874,
881
],
[
1150,
1157
]
],
[
[
910,
914
],
[
5315,
5319
]
],
[
[
972,
1003
],
[
8803,
8834
]
],
[
[
1011,
1042
],
[
9077,
9108
]
],
[
[
1044,
1073
],
[
9202,
9231
]
],
[
[
1075,
1108
],
[
8676,
8709
]
],
[
[
1129,
1137
],
[
1238,
1246
],
[
1290,
1298
],
[
1340,
1348
],
[
1403,
1411
],
[
1492,
1500
],
[
6484,
6492
],
[
6592,
6600
],
[
6635,
6643
],
[
6688,
6696
]
],
[
[
1140,
1147
],
[
8277,
8284
]
],
[
[
1184,
1199
]
],
[
[
7113,
7131
],
[
5440,
5458
]
],
[
[
9345,
9368
],
[
5940,
5963
],
[
6225,
6248
]
]
] |
# -*- coding: utf-8 -*-
#
# django-faq documentation build configuration file, created by
# sphinx-quickstart on Sat Sep 17 13:09:21 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
intersphinx_mapping = {
'python': ('http://python.readthedocs.org/en/latest/', None),
'django': ('http://django.readthedocs.org/en/latest/', None),
'sphinx': ('http://sphinx.readthedocs.org/en/latest/', None),
}
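# For illustration: with the mapping above, a cross-reference such as
# :class:`python:dict` in the reST sources resolves against the inventory
# registered under the 'python' key (and likewise for 'django' and 'sphinx').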
# Add any paths that contain templates here, relative to this directory.
# templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'django-faq'
copyright = '2012, Ben Spaulding'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.8'
# The full version, including alpha/beta/rc tags.
release = '0.8.3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-faqdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'django-faq.tex', 'django-faq Documentation',
'Ben Spaulding', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'django-faq', 'django-faq Documentation',
['Ben Spaulding'], 1)
]
| [
[
[
428,
431
]
],
[
[
433,
435
]
],
[
[
1040,
1050
]
],
[
[
1103,
1122
]
],
[
[
1468,
1481
]
],
[
[
1587,
1597
]
],
[
[
1650,
1657
]
],
[
[
1673,
1682
]
],
[
[
1909,
1916
]
],
[
[
1975,
1982
]
],
[
[
2462,
2478
]
],
[
[
3036,
3050
]
],
[
[
3335,
3345
]
],
[
[
5847,
5864
]
],
[
[
6248,
6263
]
],
[
[
7167,
7176
]
],
[
[
7347,
7366
]
]
] |
import importlib
import importlib.util
import json
import os
import signal
import subprocess
import sys
import time
import urllib.request
import pytest
import matplotlib as mpl
# Minimal smoke-testing of the backends for which the dependencies are
# PyPI-installable on CI. They are not available for all tested Python
# versions so we don't fail on missing backends.
def _get_testable_interactive_backends():
backends = []
for deps, backend in [
(["cairo", "gi"], "gtk3agg"),
(["cairo", "gi"], "gtk3cairo"),
(["PyQt5"], "qt5agg"),
(["PyQt5", "cairocffi"], "qt5cairo"),
(["PySide2"], "qt5agg"),
(["PySide2", "cairocffi"], "qt5cairo"),
(["tkinter"], "tkagg"),
(["wx"], "wx"),
(["wx"], "wxagg"),
(["matplotlib.backends._macosx"], "macosx"),
]:
reason = None
missing = [dep for dep in deps if not importlib.util.find_spec(dep)]
if sys.platform == "linux" and not os.environ.get("DISPLAY"):
reason = "$DISPLAY is unset"
elif missing:
reason = "{} cannot be imported".format(", ".join(missing))
elif backend == 'macosx' and os.environ.get('TF_BUILD'):
reason = "macosx backend fails on Azure"
if reason:
backend = pytest.param(
backend,
marks=pytest.mark.skip(
reason=f"Skipping {backend} because {reason}"))
elif backend.startswith('wx') and sys.platform == 'darwin':
# ignore on OSX because that's currently broken (github #16849)
backend = pytest.param(
backend,
marks=pytest.mark.xfail(reason='github #16849'))
backends.append(backend)
return backends
# Using a timer not only allows testing of timers (on other backends), but is
# also necessary on gtk3 and wx, where a direct call to key_press_event("q")
# from draw_event causes breakage due to the canvas widget being deleted too
# early. Also, gtk3 redefines key_press_event with a different signature, so
# we directly invoke it from the superclass instead.
_test_script = """\
import importlib
import importlib.util
import io
import json
import sys
from unittest import TestCase
import matplotlib as mpl
from matplotlib import pyplot as plt, rcParams
from matplotlib.backend_bases import FigureCanvasBase
rcParams.update({
"webagg.open_in_browser": False,
"webagg.port_retries": 1,
})
if len(sys.argv) >= 2: # Second argument is json-encoded rcParams.
rcParams.update(json.loads(sys.argv[1]))
backend = plt.rcParams["backend"].lower()
assert_equal = TestCase().assertEqual
assert_raises = TestCase().assertRaises
if backend.endswith("agg") and not backend.startswith(("gtk3", "web")):
# Force interactive framework setup.
plt.figure()
# Check that we cannot switch to a backend using another interactive
# framework, but can switch to a backend using cairo instead of agg, or a
# non-interactive backend. In the first case, we use tkagg as the "other"
# interactive backend as it is (essentially) guaranteed to be present.
# Moreover, don't test switching away from gtk3 (as Gtk.main_level() is
# not set up at this point yet) and webagg (which uses no interactive
# framework).
if backend != "tkagg":
with assert_raises(ImportError):
mpl.use("tkagg", force=True)
def check_alt_backend(alt_backend):
mpl.use(alt_backend, force=True)
fig = plt.figure()
assert_equal(
type(fig.canvas).__module__,
"matplotlib.backends.backend_{}".format(alt_backend))
if importlib.util.find_spec("cairocffi"):
check_alt_backend(backend[:-3] + "cairo")
check_alt_backend("svg")
mpl.use(backend, force=True)
fig, ax = plt.subplots()
assert_equal(
type(fig.canvas).__module__,
"matplotlib.backends.backend_{}".format(backend))
ax.plot([0, 1], [2, 3])
timer = fig.canvas.new_timer(1.) # Test that floats are cast to int as needed.
timer.add_callback(FigureCanvasBase.key_press_event, fig.canvas, "q")
# Trigger quitting upon draw.
fig.canvas.mpl_connect("draw_event", lambda event: timer.start())
fig.canvas.mpl_connect("close_event", print)
result = io.BytesIO()
fig.savefig(result, format='png')
plt.show()
# Ensure that the window is really closed.
plt.pause(0.5)
# Test that saving works after interactive window is closed, but the figure is
# not deleted.
result_after = io.BytesIO()
fig.savefig(result_after, format='png')
if not backend.startswith('qt5') and sys.platform == 'darwin':
# FIXME: This should be enabled everywhere once Qt5 is fixed on macOS to
# not resize incorrectly.
assert_equal(result.getvalue(), result_after.getvalue())
"""
_test_timeout = 10 # Empirically, 1s is not enough on Travis.
@pytest.mark.parametrize("backend", _get_testable_interactive_backends())
@pytest.mark.parametrize("toolbar", ["toolbar2", "toolmanager"])
@pytest.mark.flaky(reruns=3)
def test_interactive_backend(backend, toolbar):
if backend == "macosx":
if toolbar == "toolmanager":
pytest.skip("toolmanager is not implemented for macosx.")
if toolbar == "toolbar2" and os.environ.get('TRAVIS'):
# See https://github.com/matplotlib/matplotlib/issues/18213
pytest.skip("toolbar2 for macosx is buggy on Travis.")
proc = subprocess.run(
[sys.executable, "-c", _test_script,
json.dumps({"toolbar": toolbar})],
env={**os.environ, "MPLBACKEND": backend, "SOURCE_DATE_EPOCH": "0"},
timeout=_test_timeout,
stdout=subprocess.PIPE, universal_newlines=True)
if proc.returncode:
pytest.fail("The subprocess returned with non-zero exit status "
f"{proc.returncode}.")
assert proc.stdout.count("CloseEvent") == 1
@pytest.mark.skipif('TF_BUILD' in os.environ,
reason="this test fails an azure for unknown reasons")
@pytest.mark.skipif(os.name == "nt", reason="Cannot send SIGINT on Windows.")
def test_webagg():
pytest.importorskip("tornado")
proc = subprocess.Popen([sys.executable, "-c", _test_script],
env={**os.environ, "MPLBACKEND": "webagg",
"SOURCE_DATE_EPOCH": "0"})
url = "http://{}:{}".format(
mpl.rcParams["webagg.address"], mpl.rcParams["webagg.port"])
timeout = time.perf_counter() + _test_timeout
while True:
try:
retcode = proc.poll()
# check that the subprocess for the server is not dead
assert retcode is None
conn = urllib.request.urlopen(url)
break
except urllib.error.URLError:
if time.perf_counter() > timeout:
pytest.fail("Failed to connect to the webagg server.")
else:
continue
conn.close()
proc.send_signal(signal.SIGINT)
assert proc.wait(timeout=_test_timeout) == 0
| [
[
[
7,
16
]
],
[
[
24,
38
],
[
948,
957
]
],
[
[
46,
50
],
[
5528,
5532
]
],
[
[
58,
60
],
[
5952,
5954
],
[
6059,
6061
],
[
1022,
1024
],
[
1221,
1223
],
[
5281,
5283
],
[
5578,
5580
],
[
6272,
6274
]
],
[
[
68,
74
],
[
6986,
6992
]
],
[
[
82,
92
],
[
5458,
5468
],
[
5686,
5696
],
[
6182,
6192
]
],
[
[
100,
103
],
[
990,
993
],
[
1532,
1535
],
[
5483,
5486
],
[
6200,
6203
]
],
[
[
111,
115
],
[
6484,
6488
],
[
6803,
6807
]
],
[
[
123,
137
],
[
6704,
6710
],
[
6765,
6771
]
],
[
[
146,
152
],
[
4894,
4900
],
[
4968,
4974
],
[
5033,
5039
],
[
5919,
5925
],
[
6040,
6046
],
[
1343,
1349
],
[
1404,
1410
],
[
1656,
1662
],
[
1717,
1723
],
[
5186,
5192
],
[
5391,
5397
],
[
5760,
5766
],
[
6140,
6146
],
[
6850,
6856
]
],
[
[
161,
178
],
[
6409,
6412
],
[
6441,
6444
]
],
[
[
378,
412
],
[
4929,
4963
]
],
[
[
2178,
2190
],
[
5505,
5517
],
[
6222,
6234
]
],
[
[
4828,
4841
],
[
5656,
5669
],
[
6506,
6519
],
[
7030,
7043
]
],
[
[
5065,
5089
]
],
[
[
6121,
6132
]
]
] |
TENPOW18 = 10 ** 18
TENPOW6 = 10 ** 6
ZERO_ADDRESS = '0x0000000000000000000000000000000000000000'
ETH_ADDRESS = '0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE'
AUCTION_TOKENS = 10000 * TENPOW18
AUCTION_TIME = 50000
AUCTION_START_PRICE = 100 * TENPOW18
AUCTION_RESERVE = 0.001 * TENPOW18
AUCTION_MINIMUM_COMMITMENT = 10 * TENPOW18
CROWDSALE_TOKENS = 10000 * TENPOW18
CROWDSALE_TOKENS_2 = 10 * TENPOW18
CROWDSALE_TIME = 50000
CROWDSALE_RATE = 0.001 * TENPOW18
CROWDSALE_RATE_2 = 1 * TENPOW18
CROWDSALE_GOAL = 10 * TENPOW18
CROWDSALE_GOAL_2 = 5 * TENPOW18
CROWDSALE_RATE_USDC = 0.0005 * TENPOW6
CROWDSALE_RATE_USDC_2 = 2 * TENPOW6
CROWDSALE_GOAL_USDC = 10 * TENPOW6
CROWDSALE_GOAL_USDC_2 = 5 * TENPOW6
SECONDS_IN_DAY = 24*60*60
TOKENS_TO_MINT = 1000 * TENPOW18
ETH_TO_DEPOSIT = 1 * TENPOW18
POOL_LAUNCH_DEADLINE = 10 * SECONDS_IN_DAY
POOL_LAUNCH_WINDOW = 3 * SECONDS_IN_DAY
POOL_LAUNCH_LOCKTIME = 30 * SECONDS_IN_DAY
POOL_LIQUIDITY_PERCENT = 100
HYPERBOLIC_AUCTION_FACTOR = 2
DOCUMENT_NAME = "MISO"
DOCUMENT_DATA = "MISO: Do you comply?"
USDC_TOKENS = 1000000 * TENPOW18 | [
[
[
0,
8
],
[
184,
192
],
[
242,
250
],
[
277,
285
],
[
320,
328
],
[
357,
365
],
[
392,
400
],
[
450,
458
],
[
482,
490
],
[
514,
522
],
[
546,
554
],
[
755,
763
],
[
785,
793
],
[
1069,
1077
]
],
[
[
20,
27
],
[
587,
594
],
[
623,
630
],
[
659,
666
],
[
695,
702
]
],
[
[
39,
51
]
],
[
[
99,
110
]
],
[
[
159,
173
]
],
[
[
193,
205
]
],
[
[
214,
233
]
],
[
[
251,
266
]
],
[
[
286,
312
]
],
[
[
330,
346
]
],
[
[
366,
384
]
],
[
[
402,
416
]
],
[
[
425,
439
]
],
[
[
459,
475
]
],
[
[
492,
506
]
],
[
[
523,
539
]
],
[
[
556,
575
]
],
[
[
595,
616
]
],
[
[
632,
651
]
],
[
[
667,
688
]
],
[
[
704,
718
],
[
823,
837
],
[
863,
877
],
[
906,
920
]
],
[
[
731,
745
]
],
[
[
764,
778
]
],
[
[
795,
815
]
],
[
[
838,
856
]
],
[
[
878,
898
]
],
[
[
921,
943
]
],
[
[
950,
975
]
],
[
[
981,
994
]
],
[
[
1004,
1017
]
],
[
[
1045,
1056
]
]
] |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Sequence, Tuple, Type, Union
import pkg_resources
import google.api_core.client_options as ClientOptions # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.servicecontrol_v1.types import metric_value
from google.cloud.servicecontrol_v1.types import quota_controller
from .transports.base import QuotaControllerTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import QuotaControllerGrpcAsyncIOTransport
from .client import QuotaControllerClient
class QuotaControllerAsyncClient:
"""`Google Quota Control
API <https://cloud.google.com/service-control/overview>`__
Allows clients to allocate and release quota against a `managed
service <https://cloud.google.com/service-management/reference/rpc/google.api/servicemanagement.v1#google.api.servicemanagement.v1.ManagedService>`__.
"""
_client: QuotaControllerClient
DEFAULT_ENDPOINT = QuotaControllerClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = QuotaControllerClient.DEFAULT_MTLS_ENDPOINT
common_billing_account_path = staticmethod(
QuotaControllerClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
QuotaControllerClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(QuotaControllerClient.common_folder_path)
parse_common_folder_path = staticmethod(
QuotaControllerClient.parse_common_folder_path
)
common_organization_path = staticmethod(
QuotaControllerClient.common_organization_path
)
parse_common_organization_path = staticmethod(
QuotaControllerClient.parse_common_organization_path
)
common_project_path = staticmethod(QuotaControllerClient.common_project_path)
parse_common_project_path = staticmethod(
QuotaControllerClient.parse_common_project_path
)
common_location_path = staticmethod(QuotaControllerClient.common_location_path)
parse_common_location_path = staticmethod(
QuotaControllerClient.parse_common_location_path
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
QuotaControllerAsyncClient: The constructed client.
"""
return QuotaControllerClient.from_service_account_info.__func__(QuotaControllerAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
QuotaControllerAsyncClient: The constructed client.
"""
return QuotaControllerClient.from_service_account_file.__func__(QuotaControllerAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@property
def transport(self) -> QuotaControllerTransport:
"""Return the transport used by the client instance.
Returns:
QuotaControllerTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(QuotaControllerClient).get_transport_class, type(QuotaControllerClient)
)
def __init__(
self,
*,
credentials: credentials.Credentials = None,
transport: Union[str, QuotaControllerTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the quota controller client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.QuotaControllerTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = QuotaControllerClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def allocate_quota(
self,
request: quota_controller.AllocateQuotaRequest = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> quota_controller.AllocateQuotaResponse:
r"""Attempts to allocate quota for the specified consumer. It should
be called before the operation is executed.
This method requires the ``servicemanagement.services.quota``
permission on the specified service. For more information, see
`Cloud IAM <https://cloud.google.com/iam>`__.
**NOTE:** The client **must** fail-open on server errors
``INTERNAL``, ``UNKNOWN``, ``DEADLINE_EXCEEDED``, and
``UNAVAILABLE``. To ensure system reliability, the server may
inject these errors to prohibit any hard dependency on the quota
functionality.
Args:
request (:class:`google.cloud.servicecontrol_v1.types.AllocateQuotaRequest`):
The request object. Request message for the
AllocateQuota method.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.servicecontrol_v1.types.AllocateQuotaResponse:
Response message for the
AllocateQuota method.
"""
# Create or coerce a protobuf request object.
request = quota_controller.AllocateQuotaRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.allocate_quota,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-service-control",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("QuotaControllerAsyncClient",)
| [
[
[
626,
637
]
],
[
[
645,
654
],
[
4626,
4635
]
],
[
[
662,
664
]
],
[
[
684,
688
]
],
[
[
690,
698
],
[
7430,
7438
]
],
[
[
700,
705
],
[
7439,
7444
]
],
[
[
707,
711
]
],
[
[
713,
718
],
[
4852,
4857
]
],
[
[
726,
739
],
[
9515,
9528
],
[
9623,
9636
]
],
[
[
748,
795
],
[
4931,
4944
]
],
[
[
840,
850
]
],
[
[
895,
903
],
[
7356,
7364
],
[
9460,
9468
],
[
9685,
9693
],
[
4974,
4982
],
[
9072,
9080
]
],
[
[
948,
964
],
[
7340,
7347
]
],
[
[
1005,
1016
]
],
[
[
1059,
1074
]
],
[
[
1141,
1153
]
],
[
[
1203,
1219
],
[
7471,
7487
],
[
7268,
7284
],
[
8899,
8915
]
],
[
[
1250,
1274
],
[
4363,
4387
],
[
4863,
4887
]
],
[
[
1276,
1295
],
[
5008,
5027
],
[
9217,
9236
]
],
[
[
1333,
1368
]
],
[
[
1389,
1410
],
[
1785,
1806
],
[
1831,
1852
],
[
1898,
1919
],
[
1999,
2020
],
[
2117,
2138
],
[
2218,
2239
],
[
2313,
2334
],
[
2420,
2441
],
[
2532,
2553
],
[
2631,
2652
],
[
2728,
2749
],
[
2823,
2844
],
[
2922,
2943
],
[
4658,
4679
],
[
4707,
4728
],
[
3470,
3491
],
[
4134,
4155
],
[
7023,
7044
]
],
[
[
1419,
1445
],
[
3527,
3553
],
[
4191,
4217
]
],
[
[
9438,
9457
],
[
9217,
9236
]
],
[
[
9663,
9682
],
[
9217,
9236
]
],
[
[
9721,
9728
]
]
] |
# extract from:
# * https://github.com/WebAssembly/design/blob/master/BinaryEncoding.md
# * https://webassembly.github.io/spec/core/binary/instructions.html
# * https://github.com/athre0z/wasm/blob/master/wasm/opcodes.py
from wasm.immtypes import *
from wasm.opcodes import INSN_ENTER_BLOCK, INSN_LEAVE_BLOCK, INSN_BRANCH, INSN_NO_FLOW
"""
TODO: add pop and pushes value per instructions
"""
_table = {
# opcode:(mnemonic/name, imm_struct, flags, pops, pushes, description)
0x00: ('unreachable', None, INSN_NO_FLOW, 0, 0, ''),
0x01: ('nop', None, 0, 0, 0, ''),
0x02: ('block', BlockImm(), INSN_ENTER_BLOCK, 0, 0, ''),
0x03: ('loop', BlockImm(), INSN_ENTER_BLOCK, 0, 0, ''),
0x04: ('if', BlockImm(), INSN_ENTER_BLOCK, 0, 0, ''),
0x05: ('else', None, INSN_ENTER_BLOCK | INSN_LEAVE_BLOCK, 0, 0, ''),
0x0b: ('end', None, INSN_LEAVE_BLOCK, 0, 0, ''),
0x0c: ('br', BranchImm(), INSN_BRANCH, 0, 0, ''),
0x0d: ('br_if', BranchImm(), INSN_BRANCH, 0, 0, ''),
0x0e: ('br_table', BranchTableImm(), INSN_BRANCH, 0, 0, ''),
0x0f: ('return', None, INSN_NO_FLOW, 0, 0, ''),
0x10: ('call', CallImm(), INSN_BRANCH, 0, 0, ''),
0x11: ('call_indirect', CallIndirectImm(), INSN_BRANCH, 0, 0, ''),
0x1a: ('drop', None, 0, 0, 0, ''),
0x1b: ('select', None, 0, 0, 0, ''),
0x20: ('get_local', LocalVarXsImm(), 0, 0, 0, ''),
0x21: ('set_local', LocalVarXsImm(), 0, 0, 0, ''),
0x22: ('tee_local', LocalVarXsImm(), 0, 0, 0, ''),
0x23: ('get_global', GlobalVarXsImm(), 0, 0, 0, ''),
0x24: ('set_global', GlobalVarXsImm(), 0, 0, 0, ''),
0x28: ('i32.load', MemoryImm(), 0, 0, 0, ''),
0x29: ('i64.load', MemoryImm(), 0, 0, 0, ''),
0x2a: ('f32.load', MemoryImm(), 0, 0, 0, ''),
0x2b: ('f64.load', MemoryImm(), 0, 0, 0, ''),
0x2c: ('i32.load8_s', MemoryImm(), 0, 0, 0, ''),
0x2d: ('i32.load8_u', MemoryImm(), 0, 0, 0, ''),
0x2e: ('i32.load16_s', MemoryImm(), 0, 0, 0, ''),
0x2f: ('i32.load16_u', MemoryImm(), 0, 0, 0, ''),
0x30: ('i64.load8_s', MemoryImm(), 0, 0, 0, ''),
0x31: ('i64.load8_u', MemoryImm(), 0, 0, 0, ''),
0x32: ('i64.load16_s', MemoryImm(), 0, 0, 0, ''),
0x33: ('i64.load16_u', MemoryImm(), 0, 0, 0, ''),
0x34: ('i64.load32_s', MemoryImm(), 0, 0, 0, ''),
0x35: ('i64.load32_u', MemoryImm(), 0, 0, 0, ''),
0x36: ('i32.store', MemoryImm(), 0, 0, 0, ''),
0x37: ('i64.store', MemoryImm(), 0, 0, 0, ''),
0x38: ('f32.store', MemoryImm(), 0, 0, 0, ''),
0x39: ('f64.store', MemoryImm(), 0, 0, 0, ''),
0x3a: ('i32.store8', MemoryImm(), 0, 0, 0, ''),
0x3b: ('i32.store16', MemoryImm(), 0, 0, 0, ''),
0x3c: ('i64.store8', MemoryImm(), 0, 0, 0, ''),
0x3d: ('i64.store16', MemoryImm(), 0, 0, 0, ''),
0x3e: ('i64.store32', MemoryImm(), 0, 0, 0, ''),
0x3f: ('current_memory', CurGrowMemImm(), 0, 0, 0, ''),
0x40: ('grow_memory', CurGrowMemImm(), 0, 0, 0, ''),
0x41: ('i32.const', I32ConstImm(), 0, 0, 0, ''),
0x42: ('i64.const', I64ConstImm(), 0, 0, 0, ''),
0x43: ('f32.const', F32ConstImm(), 0, 0, 0, ''),
0x44: ('f64.const', F64ConstImm(), 0, 0, 0, ''),
0x45: ('i32.eqz', None, 0, 0, 0, ''),
0x46: ('i32.eq', None, 0, 0, 0, ''),
0x47: ('i32.ne', None, 0, 0, 0, ''),
0x48: ('i32.lt_s', None, 0, 0, 0, ''),
0x49: ('i32.lt_u', None, 0, 0, 0, ''),
0x4a: ('i32.gt_s', None, 0, 0, 0, ''),
0x4b: ('i32.gt_u', None, 0, 0, 0, ''),
0x4c: ('i32.le_s', None, 0, 0, 0, ''),
0x4d: ('i32.le_u', None, 0, 0, 0, ''),
0x4e: ('i32.ge_s', None, 0, 0, 0, ''),
0x4f: ('i32.ge_u', None, 0, 0, 0, ''),
0x50: ('i64.eqz', None, 0, 0, 0, ''),
0x51: ('i64.eq', None, 0, 0, 0, ''),
0x52: ('i64.ne', None, 0, 0, 0, ''),
0x53: ('i64.lt_s', None, 0, 0, 0, ''),
0x54: ('i64.lt_u', None, 0, 0, 0, ''),
0x55: ('i64.gt_s', None, 0, 0, 0, ''),
0x56: ('i64.gt_u', None, 0, 0, 0, ''),
0x57: ('i64.le_s', None, 0, 0, 0, ''),
0x58: ('i64.le_u', None, 0, 0, 0, ''),
0x59: ('i64.ge_s', None, 0, 0, 0, ''),
0x5a: ('i64.ge_u', None, 0, 0, 0, ''),
0x5b: ('f32.eq', None, 0, 0, 0, ''),
0x5c: ('f32.ne', None, 0, 0, 0, ''),
0x5d: ('f32.lt', None, 0, 0, 0, ''),
0x5e: ('f32.gt', None, 0, 0, 0, ''),
0x5f: ('f32.le', None, 0, 0, 0, ''),
0x60: ('f32.ge', None, 0, 0, 0, ''),
0x61: ('f64.eq', None, 0, 0, 0, ''),
0x62: ('f64.ne', None, 0, 0, 0, ''),
0x63: ('f64.lt', None, 0, 0, 0, ''),
0x64: ('f64.gt', None, 0, 0, 0, ''),
0x65: ('f64.le', None, 0, 0, 0, ''),
0x66: ('f64.ge', None, 0, 0, 0, ''),
0x67: ('i32.clz', None, 0, 0, 0, ''),
0x68: ('i32.ctz', None, 0, 0, 0, ''),
0x69: ('i32.popcnt', None, 0, 0, 0, ''),
0x6a: ('i32.add', None, 0, 0, 0, ''),
0x6b: ('i32.sub', None, 0, 0, 0, ''),
0x6c: ('i32.mul', None, 0, 0, 0, ''),
0x6d: ('i32.div_s', None, 0, 0, 0, ''),
0x6e: ('i32.div_u', None, 0, 0, 0, ''),
0x6f: ('i32.rem_s', None, 0, 0, 0, ''),
0x70: ('i32.rem_u', None, 0, 0, 0, ''),
0x71: ('i32.and', None, 0, 0, 0, ''),
0x72: ('i32.or', None, 0, 0, 0, ''),
0x73: ('i32.xor', None, 0, 0, 0, ''),
0x74: ('i32.shl', None, 0, 0, 0, ''),
0x75: ('i32.shr_s', None, 0, 0, 0, ''),
0x76: ('i32.shr_u', None, 0, 0, 0, ''),
0x77: ('i32.rotl', None, 0, 0, 0, ''),
0x78: ('i32.rotr', None, 0, 0, 0, ''),
0x79: ('i64.clz', None, 0, 0, 0, ''),
0x7a: ('i64.ctz', None, 0, 0, 0, ''),
0x7b: ('i64.popcnt', None, 0, 0, 0, ''),
0x7c: ('i64.add', None, 0, 0, 0, ''),
0x7d: ('i64.sub', None, 0, 0, 0, ''),
0x7e: ('i64.mul', None, 0, 0, 0, ''),
0x7f: ('i64.div_s', None, 0, 0, 0, ''),
0x80: ('i64.div_u', None, 0, 0, 0, ''),
0x81: ('i64.rem_s', None, 0, 0, 0, ''),
0x82: ('i64.rem_u', None, 0, 0, 0, ''),
0x83: ('i64.and', None, 0, 0, 0, ''),
0x84: ('i64.or', None, 0, 0, 0, ''),
0x85: ('i64.xor', None, 0, 0, 0, ''),
0x86: ('i64.shl', None, 0, 0, 0, ''),
0x87: ('i64.shr_s', None, 0, 0, 0, ''),
0x88: ('i64.shr_u', None, 0, 0, 0, ''),
0x89: ('i64.rotl', None, 0, 0, 0, ''),
0x8a: ('i64.rotr', None, 0, 0, 0, ''),
0x8b: ('f32.abs', None, 0, 0, 0, ''),
0x8c: ('f32.neg', None, 0, 0, 0, ''),
0x8d: ('f32.ceil', None, 0, 0, 0, ''),
0x8e: ('f32.floor', None, 0, 0, 0, ''),
0x8f: ('f32.trunc', None, 0, 0, 0, ''),
0x90: ('f32.nearest', None, 0, 0, 0, ''),
0x91: ('f32.sqrt', None, 0, 0, 0, ''),
0x92: ('f32.add', None, 0, 0, 0, ''),
0x93: ('f32.sub', None, 0, 0, 0, ''),
0x94: ('f32.mul', None, 0, 0, 0, ''),
0x95: ('f32.div', None, 0, 0, 0, ''),
0x96: ('f32.min', None, 0, 0, 0, ''),
0x97: ('f32.max', None, 0, 0, 0, ''),
0x98: ('f32.copysign', None, 0, 0, 0, ''),
0x99: ('f64.abs', None, 0, 0, 0, ''),
0x9a: ('f64.neg', None, 0, 0, 0, ''),
0x9b: ('f64.ceil', None, 0, 0, 0, ''),
0x9c: ('f64.floor', None, 0, 0, 0, ''),
0x9d: ('f64.trunc', None, 0, 0, 0, ''),
0x9e: ('f64.nearest', None, 0, 0, 0, ''),
0x9f: ('f64.sqrt', None, 0, 0, 0, ''),
0xa0: ('f64.add', None, 0, 0, 0, ''),
0xa1: ('f64.sub', None, 0, 0, 0, ''),
0xa2: ('f64.mul', None, 0, 0, 0, ''),
0xa3: ('f64.div', None, 0, 0, 0, ''),
0xa4: ('f64.min', None, 0, 0, 0, ''),
0xa5: ('f64.max', None, 0, 0, 0, ''),
0xa6: ('f64.copysign', None, 0, 0, 0, ''),
0xa7: ('i32.wrap/i64', None, 0, 0, 0, ''),
0xa8: ('i32.trunc_s/f32', None, 0, 0, 0, ''),
0xa9: ('i32.trunc_u/f32', None, 0, 0, 0, ''),
0xaa: ('i32.trunc_s/f64', None, 0, 0, 0, ''),
0xab: ('i32.trunc_u/f64', None, 0, 0, 0, ''),
0xac: ('i64.extend_s/i32', None, 0, 0, 0, ''),
0xad: ('i64.extend_u/i32', None, 0, 0, 0, ''),
0xae: ('i64.trunc_s/f32', None, 0, 0, 0, ''),
0xaf: ('i64.trunc_u/f32', None, 0, 0, 0, ''),
0xb0: ('i64.trunc_s/f64', None, 0, 0, 0, ''),
0xb1: ('i64.trunc_u/f64', None, 0, 0, 0, ''),
0xb2: ('f32.convert_s/i32', None, 0, 0, 0, ''),
0xb3: ('f32.convert_u/i32', None, 0, 0, 0, ''),
0xb4: ('f32.convert_s/i64', None, 0, 0, 0, ''),
0xb5: ('f32.convert_u/i64', None, 0, 0, 0, ''),
0xb6: ('f32.demote/f64', None, 0, 0, 0, ''),
0xb7: ('f64.convert_s/i32', None, 0, 0, 0, ''),
0xb8: ('f64.convert_u/i32', None, 0, 0, 0, ''),
0xb9: ('f64.convert_s/i64', None, 0, 0, 0, ''),
0xba: ('f64.convert_u/i64', None, 0, 0, 0, ''),
0xbb: ('f64.promote/f32', None, 0, 0, 0, ''),
0xbc: ('i32.reinterpret/f32', None, 0, 0, 0, ''),
0xbd: ('i64.reinterpret/f64', None, 0, 0, 0, ''),
0xbe: ('f32.reinterpret/i32', None, 0, 0, 0, ''),
0xbf: ('f64.reinterpret/i64', None, 0, 0, 0, ''),
}
class Wasm(object):
"""Wasm bytecode."""
def __init__(self):
self.table = _table
self.reverse_table = self._get_reverse_table()
def _get_reverse_table(self):
"""Build an internal table used in the assembler."""
# opcode:(mnemonic/name, imm_struct, flags, pops, pushes, description)
reverse_table = {}
for (opcode, (mnemonic, imm_struct,
flags, pops, pushes, description)) in self.table.items():
reverse_table[mnemonic] = opcode, mnemonic, imm_struct, flags, pops, pushes, description
return reverse_table
| [
[
[
248,
249
],
[
595,
603
],
[
655,
663
],
[
713,
721
],
[
897,
906
],
[
954,
963
],
[
1014,
1028
],
[
1127,
1134
],
[
1190,
1205
],
[
1339,
1352
],
[
1394,
1407
],
[
1449,
1462
],
[
1505,
1519
],
[
1562,
1576
],
[
1618,
1627
],
[
1668,
1677
],
[
1718,
1727
],
[
1768,
1777
],
[
1821,
1830
],
[
1874,
1883
],
[
1928,
1937
],
[
1982,
1991
],
[
2035,
2044
],
[
2088,
2097
],
[
2142,
2151
],
[
2196,
2205
],
[
2250,
2259
],
[
2304,
2313
],
[
2355,
2364
],
[
2406,
2415
],
[
2457,
2466
],
[
2508,
2517
],
[
2560,
2569
],
[
2613,
2622
],
[
2665,
2674
],
[
2718,
2727
],
[
2771,
2780
],
[
2827,
2840
],
[
2884,
2897
],
[
2940,
2951
],
[
2993,
3004
],
[
3046,
3057
],
[
3099,
3110
]
],
[
[
275,
291
],
[
607,
623
],
[
667,
683
],
[
725,
741
],
[
779,
795
]
],
[
[
293,
309
],
[
798,
814
],
[
851,
867
]
],
[
[
311,
322
],
[
910,
921
],
[
967,
978
],
[
1032,
1043
],
[
1138,
1149
],
[
1209,
1220
]
],
[
[
324,
336
],
[
512,
524
],
[
1083,
1095
]
],
[
[
394,
400
],
[
8690,
8696
]
],
[
[
8605,
8609
]
]
] |
import json
from pytest import raises
from graphql.core import graphql
from graphql.core.error import format_error
from graphql.core.language.location import SourceLocation
from graphql.core.language.parser import parse
from graphql.core.execution import execute
from graphql.core.type import (
GraphQLSchema,
GraphQLObjectType,
GraphQLField,
GraphQLArgument,
GraphQLInputObjectType,
GraphQLInputObjectField,
GraphQLString,
GraphQLList,
GraphQLEnumType,
GraphQLEnumValue,
)
from graphql.core.type.introspection import TypeFieldResolvers
from graphql.core.validation.rules import ProvidedNonNullArguments
introspection_query = '''
query IntrospectionQuery {
__schema {
queryType { name }
mutationType { name }
types {
...FullType
}
directives {
name
args {
name
type { ...TypeRef }
defaultValue
}
onOperation
onFragment
onField
}
}
}
fragment FullType on __Type {
kind
name
fields {
name
args {
name
type { ...TypeRef }
defaultValue
}
type {
...TypeRef
}
isDeprecated
deprecationReason
}
inputFields {
name
type { ...TypeRef }
defaultValue
}
interfaces {
...TypeRef
}
enumValues {
name
isDeprecated
deprecationReason
}
possibleTypes {
...TypeRef
}
}
fragment TypeRef on __Type {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
}
}
}
}
'''
def sort_lists(value):
if isinstance(value, dict):
new_mapping = []
for k in sorted(value.keys()):
new_mapping.append((k, sort_lists(value[k])))
return new_mapping
elif isinstance(value, list):
return sorted(map(sort_lists, value), key=repr)
return value
def test_executes_an_introspection_query():
EmptySchema = GraphQLSchema(GraphQLObjectType('QueryRoot', {}))
result = graphql(EmptySchema, introspection_query)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'__schema': {'directives': [{'args': [{'defaultValue': None,
'name': 'if',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}}],
'name': 'include',
'onField': True,
'onFragment': True,
'onOperation': False},
{'args': [{'defaultValue': None,
'name': 'if',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}}],
'name': 'skip',
'onField': True,
'onFragment': True,
'onOperation': False}],
'mutationType': None,
'queryType': {'name': 'QueryRoot'},
'types': [{'enumValues': None,
'fields': [],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': 'QueryRoot',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'types',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type'}}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'queryType',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'mutationType',
'type': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'directives',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Directive'}}}}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__Schema',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'kind',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'ENUM',
'name': '__TypeKind',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'name',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'description',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [{'defaultValue': 'false',
'name': 'includeDeprecated',
'type': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}],
'deprecationReason': None,
'isDeprecated': False,
'name': 'fields',
'type': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Field',
'ofType': None}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'interfaces',
'type': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'possibleTypes',
'type': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}}},
{'args': [{'defaultValue': 'false',
'name': 'includeDeprecated',
'type': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}],
'deprecationReason': None,
'isDeprecated': False,
'name': 'enumValues',
'type': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__EnumValue',
'ofType': None}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'inputFields',
'type': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__InputValue',
'ofType': None}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'ofType',
'type': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__Type',
'possibleTypes': None},
{'enumValues': [{'deprecationReason': None,
'isDeprecated': False,
'name': 'SCALAR'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'OBJECT'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'INTERFACE'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'UNION'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'ENUM'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'INPUT_OBJECT'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'LIST'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'NON_NULL'}],
'fields': None,
'inputFields': None,
'interfaces': None,
'kind': 'ENUM',
'name': '__TypeKind',
'possibleTypes': None},
{'enumValues': None,
'fields': None,
'inputFields': None,
'interfaces': None,
'kind': 'SCALAR',
'name': 'String',
'possibleTypes': None},
{'enumValues': None,
'fields': None,
'inputFields': None,
'interfaces': None,
'kind': 'SCALAR',
'name': 'Boolean',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'name',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'description',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'args',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__InputValue'}}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'type',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'isDeprecated',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'deprecationReason',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__Field',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'name',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'description',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'type',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'defaultValue',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__InputValue',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'name',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'description',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'isDeprecated',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'deprecationReason',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__EnumValue',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'name',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'description',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'args',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__InputValue'}}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'onOperation',
'type': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'onFragment',
'type': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'onField',
'type': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__Directive',
'possibleTypes': None}]}})
def test_introspects_on_input_object():
TestInputObject = GraphQLInputObjectType('TestInputObject', {
'a': GraphQLInputObjectField(GraphQLString, default_value='foo'),
'b': GraphQLInputObjectField(GraphQLList(GraphQLString)),
})
TestType = GraphQLObjectType('TestType', {
'field': GraphQLField(
type=GraphQLString,
args={'complex': GraphQLArgument(TestInputObject)},
resolver=lambda obj, args, info: json.dumps(args.get('complex'))
)
})
schema = GraphQLSchema(TestType)
request = '''
{
__schema {
types {
kind
name
inputFields {
name
type { ...TypeRef }
defaultValue
}
}
}
}
fragment TypeRef on __Type {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
}
}
}
}
'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists({'kind': 'INPUT_OBJECT',
'name': 'TestInputObject',
'inputFields':
[{'name': 'a',
'type':
{'kind': 'SCALAR',
'name': 'String',
'ofType': None},
'defaultValue': '"foo"'},
{'name': 'b',
'type':
{'kind': 'LIST',
'name': None,
'ofType':
{'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
'defaultValue': None}]}) in \
sort_lists(result.data['__schema']['types'])
def test_supports_the_type_root_field():
TestType = GraphQLObjectType('TestType', {
'testField': GraphQLField(GraphQLString)
})
schema = GraphQLSchema(TestType)
request = '{ __type(name: "TestType") { name } }'
result = execute(schema, object(), parse(request))
assert not result.errors
assert result.data == {'__type': {'name': 'TestType'}}
def test_identifies_deprecated_fields():
TestType = GraphQLObjectType('TestType', {
'nonDeprecated': GraphQLField(GraphQLString),
'deprecated': GraphQLField(GraphQLString,
deprecation_reason='Removed in 1.0')
})
schema = GraphQLSchema(TestType)
request = '''{__type(name: "TestType") {
name
fields(includeDeprecated: true) {
name
isDeprecated
deprecationReason
}
} }'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'__type': {
'name': 'TestType',
'fields': [
{'name': 'nonDeprecated', 'isDeprecated': False, 'deprecationReason': None},
{'name': 'deprecated', 'isDeprecated': True,
'deprecationReason': 'Removed in 1.0'},
]
}})
def test_respects_the_includedeprecated_parameter_for_fields():
TestType = GraphQLObjectType('TestType', {
'nonDeprecated': GraphQLField(GraphQLString),
'deprecated': GraphQLField(GraphQLString,
deprecation_reason='Removed in 1.0')
})
schema = GraphQLSchema(TestType)
request = '''{__type(name: "TestType") {
name
trueFields: fields(includeDeprecated: true) { name }
falseFields: fields(includeDeprecated: false) { name }
omittedFields: fields { name }
} }'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'__type': {
'name': 'TestType',
'trueFields': [{'name': 'nonDeprecated'}, {'name': 'deprecated'}],
'falseFields': [{'name': 'nonDeprecated'}],
'omittedFields': [{'name': 'nonDeprecated'}],
}})
def test_identifies_deprecated_enum_values():
TestEnum = GraphQLEnumType('TestEnum', {
'NONDEPRECATED': 0,
'DEPRECATED': GraphQLEnumValue(1, deprecation_reason='Removed in 1.0'),
'ALSONONDEPRECATED': 2
})
TestType = GraphQLObjectType('TestType', {
'testEnum': GraphQLField(TestEnum)
})
schema = GraphQLSchema(TestType)
request = '''{__type(name: "TestEnum") {
name
enumValues(includeDeprecated: true) {
name
isDeprecated
deprecationReason
}
} }'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'__type': {
'name': 'TestEnum',
'enumValues': [
{'name': 'NONDEPRECATED', 'isDeprecated': False, 'deprecationReason': None},
{'name': 'DEPRECATED', 'isDeprecated': True, 'deprecationReason': 'Removed in 1.0'},
{'name': 'ALSONONDEPRECATED', 'isDeprecated': False, 'deprecationReason': None},
]}})
def test_respects_the_includedeprecated_parameter_for_enum_values():
TestEnum = GraphQLEnumType('TestEnum', {
'NONDEPRECATED': 0,
'DEPRECATED': GraphQLEnumValue(1, deprecation_reason='Removed in 1.0'),
'ALSONONDEPRECATED': 2
})
TestType = GraphQLObjectType('TestType', {
'testEnum': GraphQLField(TestEnum)
})
schema = GraphQLSchema(TestType)
request = '''{__type(name: "TestEnum") {
name
trueValues: enumValues(includeDeprecated: true) { name }
falseValues: enumValues(includeDeprecated: false) { name }
omittedValues: enumValues { name }
} }'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'__type': {
'name': 'TestEnum',
'trueValues': [{'name': 'NONDEPRECATED'}, {'name': 'DEPRECATED'},
{'name': 'ALSONONDEPRECATED'}],
'falseValues': [{'name': 'NONDEPRECATED'},
{'name': 'ALSONONDEPRECATED'}],
'omittedValues': [{'name': 'NONDEPRECATED'},
{'name': 'ALSONONDEPRECATED'}],
}})
def test_fails_as_expected_on_the_type_root_field_without_an_arg():
TestType = GraphQLObjectType('TestType', {
'testField': GraphQLField(GraphQLString)
})
schema = GraphQLSchema(TestType)
request = '''
{
__type {
name
}
}'''
result = graphql(schema, request)
expected_error = {'message': ProvidedNonNullArguments.missing_field_arg_message('__type', 'name', 'String!'),
'locations': [SourceLocation(line=3, column=9)]}
assert (expected_error in [format_error(error) for error in result.errors])
def test_exposes_descriptions_on_types_and_fields():
QueryRoot = GraphQLObjectType('QueryRoot', {})
schema = GraphQLSchema(QueryRoot)
request = '''{
schemaType: __type(name: "__Schema") {
name,
description,
fields {
name,
description
}
}
}
'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'schemaType': {
'name': '__Schema',
'description': 'A GraphQL Schema defines the capabilities of a ' +
'GraphQL server. It exposes all available types and ' +
'directives on the server, as well as the entry ' +
'points for query and mutation operations.',
'fields': [
{
'name': 'types',
'description': 'A list of all types supported by this server.'
},
{
'name': 'queryType',
'description': 'The type that query operations will be rooted at.'
},
{
'name': 'mutationType',
'description': 'If this server supports mutation, the type that ' +
'mutation operations will be rooted at.'
},
{
'name': 'directives',
'description': 'A list of all directives supported by this server.'
}
]
}})
def test_exposes_descriptions_on_enums():
QueryRoot = GraphQLObjectType('QueryRoot', {})
schema = GraphQLSchema(QueryRoot)
request = '''{
typeKindType: __type(name: "__TypeKind") {
name,
description,
enumValues {
name,
description
}
}
}
'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'typeKindType': {
'name': '__TypeKind',
'description': 'An enum describing what kind of type a given __Type is',
'enumValues': [
{
'description': 'Indicates this type is a scalar.',
'name': 'SCALAR'
},
{
'description': 'Indicates this type is an object. ' +
'`fields` and `interfaces` are valid fields.',
'name': 'OBJECT'
},
{
'description': 'Indicates this type is an interface. ' +
'`fields` and `possibleTypes` are valid fields.',
'name': 'INTERFACE'
},
{
'description': 'Indicates this type is a union. ' +
'`possibleTypes` is a valid field.',
'name': 'UNION'
},
{
'description': 'Indicates this type is an enum. ' +
'`enumValues` is a valid field.',
'name': 'ENUM'
},
{
'description': 'Indicates this type is an input object. ' +
'`inputFields` is a valid field.',
'name': 'INPUT_OBJECT'
},
{
'description': 'Indicates this type is a list. ' +
'`ofType` is a valid field.',
'name': 'LIST'
},
{
'description': 'Indicates this type is a non-null. ' +
'`ofType` is a valid field.',
'name': 'NON_NULL'
}
]
}})
def test_type_field_resolver_resolves_unknown_kind():
class Unk(object):
pass
with raises(ValueError) as excinfo:
TypeFieldResolvers.kind(Unk())
assert 'Unknown kind of type: ' in str(excinfo.value)
| [
[
[
7,
11
],
[
38920,
38924
]
],
[
[
31,
37
],
[
49078,
49084
]
],
[
[
63,
70
],
[
2143,
2150
],
[
39538,
39545
],
[
41239,
41246
],
[
42198,
42205
],
[
43114,
43121
],
[
44227,
44234
],
[
45029,
45036
],
[
45678,
45685
],
[
47174,
47181
]
],
[
[
102,
114
],
[
45270,
45282
]
],
[
[
158,
172
],
[
45204,
45218
]
],
[
[
214,
219
],
[
40619,
40624
]
],
[
[
255,
262
],
[
40593,
40600
]
],
[
[
299,
312
],
[
2079,
2092
],
[
38982,
38995
],
[
40502,
40515
],
[
41009,
41022
],
[
41929,
41942
],
[
42880,
42893
],
[
43946,
43959
],
[
44916,
44929
],
[
45438,
45451
],
[
46926,
46939
]
],
[
[
318,
335
],
[
2093,
2110
],
[
38716,
38733
],
[
40401,
40418
],
[
40781,
40798
],
[
41701,
41718
],
[
42785,
42802
],
[
43851,
43868
],
[
44815,
44832
],
[
45390,
45407
],
[
46878,
46895
]
],
[
[
341,
353
],
[
38765,
38777
],
[
40454,
40466
],
[
40838,
40850
],
[
40889,
40901
],
[
41758,
41770
],
[
41809,
41821
],
[
42837,
42849
],
[
43903,
43915
],
[
44868,
44880
]
],
[
[
359,
374
],
[
38840,
38855
]
],
[
[
380,
402
],
[
38510,
38532
]
],
[
[
408,
431
],
[
38567,
38590
],
[
38641,
38664
]
],
[
[
437,
450
],
[
38591,
38604
],
[
38677,
38690
],
[
38796,
38809
],
[
40467,
40480
],
[
40851,
40864
],
[
40902,
40915
],
[
41771,
41784
],
[
41822,
41835
],
[
44881,
44894
]
],
[
[
456,
467
],
[
38665,
38676
]
],
[
[
473,
488
],
[
42594,
42609
],
[
43660,
43675
]
],
[
[
494,
510
],
[
42674,
42690
],
[
43740,
43756
]
],
[
[
558,
576
],
[
49117,
49135
]
],
[
[
619,
643
],
[
45087,
45111
]
],
[
[
645,
664
],
[
2164,
2183
]
],
[
[
1708,
1718
],
[
1858,
1868
],
[
1968,
1978
],
[
2225,
2235
],
[
2252,
2262
],
[
39603,
39613
],
[
40298,
40308
],
[
41304,
41314
],
[
41331,
41341
],
[
42263,
42273
],
[
42290,
42300
],
[
43179,
43189
],
[
43206,
43216
],
[
44292,
44302
],
[
44319,
44329
],
[
45743,
45753
],
[
45770,
45780
],
[
47239,
47249
],
[
47266,
47276
]
],
[
[
2021,
2057
]
],
[
[
38452,
38484
]
],
[
[
40349,
40382
]
],
[
[
40729,
40762
]
],
[
[
41626,
41682
]
],
[
[
42537,
42575
]
],
[
[
43580,
43641
]
],
[
[
44736,
44796
]
],
[
[
45325,
45370
]
],
[
[
46824,
46858
]
],
[
[
48982,
49028
]
]
] |
# coding=utf-8
import os
import sys
import django
from django.core.urlresolvers import reverse
from django.db import DatabaseError
from django.db.models import Count
from django.http import HttpResponse, Http404
from django.shortcuts import redirect, get_object_or_404
from django.utils import six
from django.views.generic.base import TemplateView
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.decorators.csrf import csrf_exempt
from .models import Talk, Photo, Speaker, Event, Tutorial, Vote
from .utils import subscribe_mail, validate_email, set_vote_cookie, can_vote
class IndexPage(ListView):
template_name = 'index.html'
context_object_name = 'events'
def get_queryset(self):
if self.request.user.is_staff:
qs = Event.objects.all()
else:
qs = Event.archived.all()
return qs.prefetch_related('talks', 'talks__speaker', 'talks__event')[:3]
def get_context_data(self, **kwargs):
context = super(IndexPage, self).get_context_data(**kwargs)
# TODO: choose how select people for index page
# I see two options:
# By last talks - Speaker.objects.order_by("-talks__event__id", "talk__position")[:9]
# Random: Speaker.objects.order_by("?")[:9]
context.update({
'speakers': Speaker.objects.order_by("?")[:10],
'main_event': Event.spotlight(self.request.user.is_staff),
'show_more_link': True,
'can_vote': can_vote(self.request)
})
return context
class EventsList(ListView):
template_name = 'event_list.html'
queryset = Event.visible.prefetch_related('talks', 'talks__speaker', 'talks__event')
context_object_name = 'events'
def get_queryset(self):
if self.request.user.is_staff:
qs = Event.objects.all()
else:
qs = Event.visible.all()
return qs.prefetch_related('talks', 'talks__speaker', 'talks__event')
class EventPage(DetailView):
template_name = 'event.html'
slug_url_kwarg = 'number'
slug_field = 'number'
def get_queryset(self):
if self.request.user.is_staff:
return Event.objects.all()
return Event.visible.all()
def get_object(self, queryset=None):
# Use a custom queryset if provided; this is required for subclasses
# like DateDetailView
if queryset is None:
queryset = self.get_queryset()
# Next, try looking up by primary key.
pk = self.kwargs.get(self.pk_url_kwarg)
slug = self.kwargs.get(self.slug_url_kwarg)
if pk is not None:
queryset = queryset.filter(pk=pk)
# Next, try looking up by slug.
if slug is not None and (pk is None or self.query_pk_and_slug):
slug_field = self.get_slug_field()
queryset = queryset.filter(**{slug_field: slug})
# If none of those are defined, it's an error.
if pk is None and slug is None:
raise AttributeError("Generic detail view %s must be called with "
"either an object pk or a slug."
% self.__class__.__name__)
try:
# Get the single item from the filtered queryset
obj = queryset.get()
except queryset.model.MultipleObjectsReturned:
obj = queryset.latest("date")
except queryset.model.DoesNotExist:
raise Http404
return obj
def get_context_data(self, **kwargs):
context = super(EventPage, self).get_context_data(**kwargs)
context.update({
'photos': context['event'].photos.all(),
'can_vote': can_vote(self.request),
})
return context
class TalkPage(DetailView):
template_name = 'talk.html'
slug_url_kwarg = 'talk_slug'
def get_queryset(self):
if self.request.user.is_staff:
return Talk.objects.select_related('event', 'speaker')
return Talk.objects.active().select_related('event', 'speaker')
def get(self, request, *args, **kwargs):
self.object = self.get_object()
# Redirect for non-canonic urls (meetup.legacy.urls)
if self.object.get_absolute_url() != request.path:
return redirect(self.object)
context = self.get_context_data(object=self.object)
return self.render_to_response(context)
class SpeakerList(ListView):
template_name = 'speakers.html'
queryset = Speaker.objects.all().order_by('name')
context_object_name = 'speakers'
class SpeakerPage(DetailView):
template_name = 'speaker.html'
def get_object(self, queryset=None):
return get_object_or_404(
Speaker.objects.prefetch_related('talks', 'talks__event'),
slug=self.kwargs['slug']
)
class AboutPage(TemplateView):
template_name = 'about.html'
def get_context_data(self, **kwargs):
context = super(AboutPage, self).get_context_data(**kwargs)
context.update({
'photos': Photo.objects.all().order_by('-pk')[:10]
})
return context
class LivePage(TemplateView):
template_name = 'live.html'
def get_context_data(self, **kwargs):
context = super(LivePage, self).get_context_data(**kwargs)
context.update({
'event': Event.spotlight(),
})
return context
class TutorialList(ListView):
template_name = 'tutorials.html'
queryset = Tutorial.objects.all().order_by('title')
context_object_name = 'tutorials'
class TutorialPage(DetailView):
template_name = 'tutorial.html'
model = Tutorial
class Py3Page(TemplateView):
template_name = 'py3.html'
def get_context_data(self, **kwargs):
context = super(Py3Page, self).get_context_data(**kwargs)
context.update({
'django': django.get_version(),
'python': sys.version,
'py3': six.PY3,
})
return context
class VoteResults(TemplateView):
template_name = 'vote_results.html'
def get_context_data(self, **kwargs):
context = super(VoteResults, self).get_context_data(**kwargs)
talks = Talk.objects.filter(event=Event.spotlight()).annotate(num_votes=Count("votes"))
talks_votes = [talk.num_votes for talk in talks]
votes_total = sum(talks_votes)
votes_max = max(talks_votes)
if votes_total:
for talk in talks:
talk.votes_percent = int(talk.num_votes * 100 / votes_total)
if talk.num_votes == votes_max:
talk.is_leader = True
context.update({
'talks': talks,
})
return context
@csrf_exempt
def ajax_vote(request, *args, **kwargs):
if request.method == 'POST':
if not can_vote(request):
return HttpResponse(u'Можно голосовать только за один доклад', status=409)
try:
event = Talk.objects.get(pk=kwargs['talk_id']).event
if not event.votable:
return HttpResponse('Voting is closed, sorry', status=409)
Vote.objects.create(talk_id=kwargs['talk_id'],
event=event,
ua=request.META.get('HTTP_USER_AGENT'),
ip=request.META.get('REMOTE_ADDR'))
response = HttpResponse(reverse('vote-results'))
response = set_vote_cookie(response)
return response
except DatabaseError:
return HttpResponse('DB error, sorry', status=402)
return HttpResponse('Only POST', status=402)
def confirm_ownership(request, *args, **kwargs):
content = os.environ.get('CONFIRM_OWNERSHIP_%s' % kwargs['filename'], None)
if content:
content_type = 'text/html' if kwargs['filename'].endswith('.html') else 'text/plain'
return HttpResponse(content, content_type=content_type)
else:
raise Http404
| [
[
[
22,
24
],
[
7773,
7775
]
],
[
[
32,
35
],
[
5984,
5987
]
],
[
[
44,
50
],
[
5940,
5946
]
],
[
[
88,
95
],
[
7464,
7471
]
],
[
[
118,
131
],
[
7581,
7594
]
],
[
[
161,
166
],
[
6327,
6332
]
],
[
[
191,
203
],
[
6929,
6941
],
[
7132,
7144
],
[
7451,
7463
],
[
7615,
7627
],
[
7670,
7682
],
[
7963,
7975
]
],
[
[
205,
212
],
[
3517,
3524
],
[
8036,
8043
]
],
[
[
242,
250
],
[
4343,
4351
]
],
[
[
252,
269
],
[
4757,
4774
]
],
[
[
295,
298
],
[
6016,
6019
]
],
[
[
337,
349
],
[
4912,
4924
],
[
5210,
5222
],
[
5737,
5749
],
[
6079,
6091
]
],
[
[
390,
400
],
[
2042,
2052
],
[
3832,
3842
],
[
4652,
4662
],
[
5651,
5661
]
],
[
[
439,
447
],
[
661,
669
],
[
1617,
1625
],
[
4494,
4502
],
[
5488,
5496
]
],
[
[
489,
500
],
[
6790,
6801
]
],
[
[
522,
526
],
[
3997,
4001
],
[
4060,
4064
],
[
6263,
6267
],
[
7030,
7034
]
],
[
[
528,
533
],
[
5118,
5123
]
],
[
[
535,
542
],
[
4556,
4563
],
[
1374,
1381
],
[
4788,
4795
]
],
[
[
544,
549
],
[
1681,
1686
],
[
825,
830
],
[
876,
881
],
[
1436,
1441
],
[
1875,
1880
],
[
1926,
1931
],
[
2231,
2236
],
[
2266,
2271
],
[
5414,
5419
],
[
6289,
6294
]
],
[
[
551,
559
],
[
5551,
5559
],
[
5712,
5720
]
],
[
[
561,
565
],
[
7196,
7200
]
],
[
[
585,
599
]
],
[
[
601,
615
]
],
[
[
617,
632
],
[
7512,
7527
]
],
[
[
634,
642
],
[
1541,
1549
],
[
3757,
3765
],
[
6891,
6899
]
],
[
[
651,
660
],
[
1047,
1056
]
],
[
[
1606,
1616
]
],
[
[
2032,
2041
],
[
3611,
3620
]
],
[
[
3823,
3831
]
],
[
[
4482,
4493
]
],
[
[
4640,
4651
]
],
[
[
4902,
4911
],
[
5027,
5036
]
],
[
[
5201,
5209
],
[
5324,
5332
]
],
[
[
5475,
5487
]
],
[
[
5638,
5650
]
],
[
[
5729,
5736
],
[
5850,
5857
]
],
[
[
6067,
6078
],
[
6201,
6212
]
],
[
[
6806,
6815
]
],
[
[
7714,
7731
]
]
] |
import unittest
from repeater import repeater
def test_repeater(benchmark):
assert benchmark(repeater,'a',5) == 'aaaaa'
assert benchmark(repeater,'Wub', 6 ) == 'Wub Wub Wub Wub Wub Wub '
| [
[
[
7,
15
]
],
[
[
37,
45
],
[
99,
107
],
[
147,
155
]
],
[
[
52,
65
]
]
] |
from distutils.core import setup
import py2exe , sys, os
sys.argv.append("py2exe")
setup(
options = {'py2exe': {'bundle_files': 1}},
windows = [{'script': "DNS.py", 'uac_info': "requireAdministrator"}],
zipfile = None,
)
| [
[
[
27,
32
],
[
86,
91
]
],
[
[
40,
46
]
],
[
[
49,
52
],
[
60,
63
]
],
[
[
54,
56
]
]
] |
from os import listdir, path
from types import GeneratorType
import six
from pyinfra import logger, pseudo_inventory
from pyinfra.api.inventory import Inventory
from pyinfra_cli.util import exec_file
# Hosts in an inventory can be just the hostname or a tuple (hostname, data)
ALLOWED_HOST_TYPES = tuple(
six.string_types + (tuple,),
)
# Group data can be any "core" Python type
ALLOWED_DATA_TYPES = tuple(
six.integer_types
+ (six.text_type, six.binary_type)
+ (bool, dict, list, set, tuple, float, complex),
)
def _is_inventory_group(key, value):
'''
Verify that a module-level variable (key = value) is a valid inventory group.
'''
if (
key.startswith('_')
or not isinstance(value, (list, tuple, GeneratorType))
):
return False
# If the group is a tuple of (hosts, data), check the hosts
if isinstance(value, tuple):
value = value[0]
# Expand any generators of hosts
if isinstance(value, GeneratorType):
value = list(value)
return all(
isinstance(item, ALLOWED_HOST_TYPES)
for item in value
)
def _is_group_data(key, value):
'''
Verify that a module-level variable (key = value) is a valid bit of group data.
'''
return (
isinstance(value, ALLOWED_DATA_TYPES)
and not key.startswith('_')
)
def _get_group_data(deploy_dir):
group_data = {}
group_data_directory = path.join(deploy_dir, 'group_data')
if path.exists(group_data_directory):
files = listdir(group_data_directory)
for file in files:
if not file.endswith('.py'):
continue
group_data_file = path.join(group_data_directory, file)
group_name = path.basename(file)[:-3]
logger.debug('Looking for group data in: {0}'.format(group_data_file))
# Read the files locals into a dict
attrs = exec_file(group_data_file, return_locals=True)
group_data[group_name] = {
key: value
for key, value in six.iteritems(attrs)
if _is_group_data(key, value)
}
return group_data
def _get_groups_from_filename(inventory_filename):
attrs = exec_file(inventory_filename, return_locals=True)
return {
key: value
for key, value in six.iteritems(attrs)
if _is_inventory_group(key, value)
}
def make_inventory(
inventory_filename,
deploy_dir=None,
ssh_port=None,
ssh_user=None,
ssh_key=None,
ssh_key_password=None,
ssh_password=None,
winrm_username=None,
winrm_password=None,
winrm_port=None,
):
'''
Builds a ``pyinfra.api.Inventory`` from the filesystem. If the file does not exist
and doesn't contain a / attempts to use that as the only hostname.
'''
if ssh_port is not None:
ssh_port = int(ssh_port)
file_groupname = None
# If we're not a valid file we assume a list of comma separated hostnames
if not path.exists(inventory_filename):
groups = {
'all': inventory_filename.split(','),
}
else:
groups = _get_groups_from_filename(inventory_filename)
# Used to set all the hosts to an additional group - that of the filename
# ie inventories/dev.py means all the hosts are in the dev group, if not present
file_groupname = path.basename(inventory_filename).rsplit('.')[0]
all_data = {}
if 'all' in groups:
all_hosts = groups.pop('all')
if isinstance(all_hosts, tuple):
all_hosts, all_data = all_hosts
# Build all out of the existing hosts if not defined
else:
all_hosts = []
for hosts in groups.values():
# Groups can be a list of hosts or tuple of (hosts, data)
hosts = hosts[0] if isinstance(hosts, tuple) else hosts
for host in hosts:
# Hosts can be a hostname or tuple of (hostname, data)
hostname = host[0] if isinstance(host, tuple) else host
if hostname not in all_hosts:
all_hosts.append(hostname)
groups['all'] = (all_hosts, all_data)
# Apply the filename group if not already defined
if file_groupname and file_groupname not in groups:
groups[file_groupname] = all_hosts
# In pyinfra an inventory is a combination of (hostnames + data). However, in CLI
# mode we want to be define this in separate files (inventory / group data). The
# issue is we want inventory access within the group data files - but at this point
# we're not ready to make an Inventory. So here we just create a fake one, and
# attach it to pseudo_inventory while we import the data files.
logger.debug('Creating fake inventory...')
fake_groups = {
# In API mode groups *must* be tuples of (hostnames, data)
name: group if isinstance(group, tuple) else (group, {})
for name, group in six.iteritems(groups)
}
fake_inventory = Inventory((all_hosts, all_data), **fake_groups)
pseudo_inventory.set(fake_inventory)
# Get all group data (group_data/*.py)
group_data = _get_group_data(deploy_dir)
# Reset the pseudo inventory
pseudo_inventory.reset()
# For each group load up any data
for name, hosts in six.iteritems(groups):
data = {}
if isinstance(hosts, tuple):
hosts, data = hosts
if name in group_data:
data.update(group_data.pop(name))
# Attach to group object
groups[name] = (hosts, data)
# Loop back through any leftover group data and create an empty (for now)
# group - this is because inventory @connectors can attach arbitrary groups
# to hosts, so we need to support that.
for name, data in six.iteritems(group_data):
groups[name] = ([], data)
return Inventory(
groups.pop('all'),
ssh_user=ssh_user,
ssh_key=ssh_key,
ssh_key_password=ssh_key_password,
ssh_port=ssh_port,
ssh_password=ssh_password,
winrm_username=winrm_username,
winrm_password=winrm_password,
winrm_port=winrm_port,
**groups
), file_groupname and file_groupname.lower()
| [
[
[
15,
22
],
[
1535,
1542
]
],
[
[
24,
28
],
[
1440,
1444
],
[
1484,
1488
],
[
1690,
1694
],
[
1753,
1757
],
[
3028,
3032
],
[
3409,
3413
]
],
[
[
47,
60
],
[
754,
767
],
[
984,
997
]
],
[
[
69,
72
],
[
312,
315
],
[
419,
422
],
[
444,
447
],
[
459,
462
],
[
2079,
2082
],
[
2357,
2360
],
[
4997,
5000
],
[
5349,
5352
],
[
5834,
5837
]
],
[
[
94,
100
],
[
1791,
1797
],
[
4774,
4780
]
],
[
[
102,
118
],
[
5098,
5114
],
[
5262,
5278
]
],
[
[
153,
162
],
[
5046,
5055
],
[
5907,
5916
]
],
[
[
192,
201
],
[
1931,
1940
],
[
2248,
2257
]
],
[
[
280,
298
],
[
1070,
1088
]
],
[
[
387,
405
],
[
1296,
1314
]
],
[
[
538,
557
],
[
2389,
2408
]
],
[
[
1128,
1142
],
[
2119,
2133
]
],
[
[
1364,
1379
],
[
5196,
5211
]
],
[
[
2189,
2214
],
[
3167,
3192
]
],
[
[
2433,
2447
]
]
] |
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
def test_constants_only():
try:
from pants.constants_only.constants import VALID_IDENTIFIERS # noqa
except ImportError as e:
assert False, 'Failed to correctly generate python package: %s' % e
| [
[
[
137,
156
]
]
] |
from struct import (unpack_from, calcsize)
from bglcapi.types import MessageType
from . import rsp
from . import evt
PARSE_MAP = {
MessageType.COMMAND_RESPONSE: {
0x00: rsp.message_to_target,
},
MessageType.EVENT: {
0x00: evt.message_to_host,
},
}
def from_binary(msg_type: int, msg_id: int, data: bytes, offset: int):
return PARSE_MAP[msg_type][msg_id](data, offset)
| [
[
[
20,
31
]
],
[
[
33,
41
]
],
[
[
70,
81
],
[
138,
149
],
[
218,
229
]
],
[
[
97,
100
],
[
184,
187
]
],
[
[
115,
118
],
[
253,
256
]
],
[
[
120,
129
],
[
367,
376
]
],
[
[
289,
300
]
]
] |
import itertools
import toposort
from populus.utils.contracts import (
compute_direct_dependency_graph,
compute_recursive_contract_dependencies,
)
def compute_deploy_order(dependency_graph):
"""
Given a dictionary that maps contract to their dependencies,
determine the overall dependency ordering for that set of contracts.
"""
return toposort.toposort_flatten(dict(dependency_graph))
def get_deploy_order(contracts_to_deploy, compiled_contracts):
# Extract and dependencies that exist due to library linking.
dependency_graph = compute_direct_dependency_graph(compiled_contracts.values())
global_deploy_order = compute_deploy_order(dependency_graph)
# Compute the full set of dependencies needed to deploy the desired
# contracts.
all_deploy_dependencies = set(itertools.chain.from_iterable(
compute_recursive_contract_dependencies(contract_name, dependency_graph)
for contract_name in contracts_to_deploy
))
all_contracts_to_deploy = all_deploy_dependencies.union(contracts_to_deploy)
# Now compute the order that the contracts should be deployed based on
# their dependencies.
deploy_order = tuple(
contract_name
for contract_name
in global_deploy_order
if contract_name in all_contracts_to_deploy
)
return deploy_order
| [
[
[
7,
16
],
[
822,
831
]
],
[
[
25,
33
],
[
368,
376
]
],
[
[
77,
108
],
[
572,
603
]
],
[
[
114,
153
],
[
861,
900
]
],
[
[
163,
183
],
[
659,
679
]
],
[
[
424,
440
]
]
] |
import utility
import static_sim_functions as smf
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.metrics import *
from time_series_grp import TimeSeriesGroupProcessing
from RandomNeighbors import RandomNeighbors
from sklearn.neighbors import NearestNeighbors
from sklearn.model_selection import KFold
import ml_modelling_ts as ml_ts
'''
This is just a run of the approaches using the methodologies; it saves the neighborhood for the UI.
'''
def common_processing(df):
# Getting percentage between 0 to 1 rather than score values
df["tschq12"] = df["tschq12"].apply(lambda x: x / 100)
df["tschq16"] = df["tschq16"].apply(lambda x: x / 100)
df["tschq17"] = df["tschq17"].apply(lambda x: x / 100)
# Feature engineering family history
df["tschq04"] = df.apply(smf.create_cols_family_hist, axis=1)
return df
def get_common_cols(col1, col2):
common_elements = set(col1).intersection(col2)
return common_elements
import properties
import pandas as pd
def initial_processing():
# Read the csv of the tschq data and make the necessary things
tschq = pd.read_pickle(properties.data_location + "/input_pckl/" + "3_q.pckl")
    # Cleaning the tschq05 question. There is an abstraction for a row, so we add the most common value
def filter_age(x):
if isinstance(x, int):
# Append the most common value obtained
return tschq["tschq05"].value_counts().head(1).index[0]
else:
return x
tschq["tschq05"] = tschq["tschq05"].apply(filter_age)
# Drop the questionnaire_id and created_at
tschq.drop(["questionnaire_id", "created_at"], axis=1, inplace=True)
# Lets read and join two questionnaires tschq and hq
hq = pd.read_pickle("data/input_pckl/4_q.pckl")
hq.isna().sum(axis=0)
# By looking at the output we are sure that h5 and h6 do not contribute much and can be dropped
hq.drop(["hq05", "hq06"], axis=1, inplace=True)
hq_df = hq.set_index("user_id")
df = tschq.join(hq_df.iloc[:, 2:], on="user_id")
drop_cols = ["tschq01", "tschq25", "tschq07-2",
"tschq13", "tschq04-1", "tschq04-2"]
# Getting percentage between 0 to 1 rather than score values
df["tschq12"] = df["tschq12"].apply(lambda x: x / 100)
df["tschq16"] = df["tschq16"].apply(lambda x: x / 100)
df["tschq17"] = df["tschq17"].apply(lambda x: x / 100)
df["tschq04"] = df.apply(smf.create_cols_family_hist, axis=1)
df.drop(drop_cols, axis=1, inplace=True)
# Set the heom object, while using the required similarity
# Alternative
# Categorical boolean mask
categorical_feature_mask = df.iloc[:, 1:].infer_objects().dtypes == object
other_feature_mask = df.iloc[:, 1:].infer_objects().dtypes != object
# filter categorical columns using mask and turn it into a list
categorical_cols = df.iloc[:, 1:].columns[categorical_feature_mask].tolist()
num_cols = df.iloc[:, 1:].columns[other_feature_mask].tolist()
cat_idx = [df.iloc[:, 1:].columns.get_loc(val) for val in categorical_cols]
num_idx = [df.iloc[:, 1:].columns.get_loc(val) for val in num_cols]
return cat_idx, num_idx, df
import os
import traceback
def save_data_objs(df, quest_cmbs="all"):
try:
if not os.path.isdir(properties.model_location + quest_cmbs):
os.makedirs(properties.model_location + quest_cmbs)
utility.save_model("".join(quest_cmbs + "/" + quest_cmbs + "_stat_q_data"), df)
encoded_combined_df = smf.preprocess(df, quest_cmbs, age_bin=False,
process_model_name="".join(quest_cmbs + "/" +
quest_cmbs + "_stat_q_data_oe_model"),
prediction=False)
# Save this encoded_data
utility.save_model("".join(quest_cmbs + "/" +
quest_cmbs + "_stat_q_data_encoded"), encoded_combined_df)
return encoded_combined_df
# Use this data to build the data over static data.
except Exception:
print(traceback.print_exc())
def weighted_average(distress_list):
average = np.asarray(distress_list, dtype=float).mean()
return average
# Function computes the weighted average as predictions for given prediction time point
def compute_weighted_avg(n_idx, encoded_data, pred_at_list, method="mean", dist_nn=None, wt_flag=False):
preds = list()
# Prediction for four time points
for pval in pred_at_list:
distress_list = list()
for vals in n_idx:
u_id = encoded_data["user_id"].iloc[vals]
user_ts = tsg_data.get_usr_mday_ts_predict(int(u_id))
# 3rd val of the series is s03 of the neighbor
print("{}, {} Values ".format(int(pval), int(u_id)))
if len(user_ts) > int(pval):
value = user_ts[int(pval), :][3]
elif len(user_ts) <= int(pval):
value = user_ts[len(user_ts)-1, :][3]
distress_list.append(value)
if wt_flag:
print("Calling by weighted distance prediction for distress")
preds.append(weighted_distance_prediction(distress_list, dist_nn))
else:
print("Calling weighted average to predict distress")
preds.append(weighted_average(distress_list))
return preds
def weighted_distance_prediction(p_preds, distance):
    # Inverse distance so that the highest weight is given to the nearest one and the least to the farthest
inv_dist = np.divide(1, distance)
#s03 - tinnitus distress weighted by distance is given as
s03_pred = (np.sum(np.multiply(p_preds, inv_dist)) / (np.sum(inv_dist)))
return s03_pred
def compute(test_nn, encoded_data,
pred_list, method="mean", dist_nn=None, wt_dist=False):
from sklearn.linear_model import LinearRegression
preds = list()
for point in pred_list:
nn_preds = list()
intercepts_list = list()
coeff_list = list()
for nn in test_nn:
u_id = encoded_data["user_id"].iloc[nn]
user_ts = tsg_data.get_usr_mday_ts_predict(int(u_id))
# Obtain the time series until time point and fit the data for linear regression
diff_arr = np.abs(np.subtract(point, user_ts[:, 1]))
diff_near_idx = np.where(diff_arr == diff_arr.min())
print("minimum to the time point is at -- ", diff_near_idx)
# difference near index. Handling for the length of users
usr_idx = diff_near_idx[0][0]
user_ts_p = user_ts[:usr_idx]
user_ts_df = pd.DataFrame(user_ts_p, columns=["day", "day_sess_index",
"s02", "s03", "s04",
"s05", "s06", "s07"])
X = user_ts_df[["day_sess_index"]]
# We show for tinnitus distress. This can be extended to other physiological variables as well.
y = user_ts_df[["s03"]]
# Fit on X axis as time and Y as the s03 predictive value.
reg_fit = LinearRegression(normalize=True)
reg_fit.fit(X, y)
# If weighted_distance is true, then predict by each of the nn_user and add to list. This will be used for
# calculating weighted_distance_predictions.
if wt_dist:
nn_pred = reg_fit.predict(np.asarray(point).reshape(1, -1))
nn_preds.append(nn_pred[0][0])
else:
intercepts_list.append(reg_fit.intercept_)
coeff_list.append(reg_fit.coef_)
if wt_dist:
print("Predicting the value of s03 for the user by a weighted average weighted by distance")
preds.append(weighted_distance_prediction(nn_preds, dist_nn))
else:
print("Predicting the value of s3 over the averaged slope and intercepts of "
"observations of the neighbors")
# y = mx + c, where m is the average slope of the neighbors and c is the average intercept obtained.
print("The equation to estimate s03 for the user is {}".format("".join(str(np.asarray(coeff_list).mean())) +
"* time_index + " +
str(np.asarray(intercepts_list).mean())))
y = np.multiply(np.asarray(coeff_list).mean(), point) + np.asarray(intercepts_list).mean()
preds.append(y)
return preds
def compute_linear_regression(test_nn, encoded_data, pred_list, method="mean"):
#test_nn = test_user_nn
#pred_list = prediction_at_list
from sklearn.linear_model import LinearRegression
preds = list()
for point in pred_list:
attr_list = list()
intercepts_list = list()
coeff_list = list()
for nn in test_nn:
u_id = encoded_data["user_id"].iloc[nn]
user_ts = tsg_data.get_m_day_ts_enumerate(int(11))
diff_arr = np.abs(np.subtract(point, user_ts[:, 1]))
diff_near_idx = np.where(diff_arr == diff_arr.min())
print(diff_near_idx)
# difference near index
usr_vals = np.array([user_ts[n_id] for n_id in diff_near_idx[0]])
if len(usr_vals) > 1:
value = usr_vals.mean(axis=0)
print("vavg" + str(value))
else:
value = usr_vals[0]
print("v" + str(value))
attr_list.append(value)
df = pd.DataFrame(user_ts)
df.columns = ["day", "day_session_id",
"s02", "s03",
"s04", "s05",
"s06", "s07"]
reg_model = LinearRegression(normalize=True)
user_x = df[["day_session_id", "s04", "s05", "s06"]].to_numpy()
user_s03 = df[["s03"]].to_numpy().ravel()
reg_model.fit(user_x, user_s03)
intercepts_list.append(reg_model.intercept_)
coeff_list.append(reg_model.coef_)
# y = mx + c, where m is the average slope of the neighbors and c is the average intercept obtained.
# convert coeff's to numpy for manipulations
numpy_attr_list = np.array(attr_list)
print(numpy_attr_list)
avg_np_attr_list = numpy_attr_list[:, 4:].mean(axis=0)
print(avg_np_attr_list)
numpy_coeff_list = np.array(coeff_list)
print(numpy_coeff_list)
print(numpy_coeff_list.mean(axis=0))
# Day_index, s02, s04, s05, s06 ,s07 - Use only the fit independent features to estimate the dependent
y = np.multiply(numpy_coeff_list[:, 0].mean(), point) + \
np.multiply(numpy_coeff_list[:, 1].mean(), avg_np_attr_list[0]) + \
np.multiply(numpy_coeff_list[:, 2].mean(), avg_np_attr_list[1]) + \
np.multiply(numpy_coeff_list[:, 3].mean(), avg_np_attr_list[2]) + \
np.asarray(intercepts_list).mean()
preds.append(y)
print(preds)
return preds
# Create test label as ground truth at prediction point.
def create_y_labels(test_data, prediction_at, method="mean"):
y_test = list()
for i in range(0, len(test_data)):
test_ts_test1 = tsg_data.get_usr_mday_ts_predict(int(test_data.iloc[i]["user_id"]))
# print(len(test_ts_test1))
if len(test_ts_test1) >= prediction_at:
y_test.append(test_ts_test1[prediction_at - 1][2])
elif len(test_ts_test1) < prediction_at:
y_test.append(test_ts_test1[len(test_ts_test1) - 1][2])
return y_test
# Create reference points for multiple reference predictions
def get_pred_ref_points(user_id, ndays, method="mean"):
# Using the default tsg which is mean observations of the user
test_user_ts = tsg_data.get_usr_mday_ts_predict(user_id)
user_ts_idx = test_user_ts[:, 1]
# ["date", "time_idx", "s02", "s03", "s04", "s05", "s06", "s07]
user_distress = test_user_ts[:, 3]
# Near evaluation. Change this for farther evaluations
# Near -> 0.20, 0.10
# Far -> 1 - (Near)
# Near points are of the sequence of observation because we are sure all stay until here.
#prediction_at = 10
# Far prediction point is the last N% of the test user time series
# It is tested for 0.75, 0.8, 0.9
prediction_at = round(len(user_ts_idx) * 0.80)
y_labels = user_distress[prediction_at:prediction_at + ndays].tolist()
prediction_at_list = user_ts_idx[prediction_at:prediction_at + ndays].tolist()
return y_labels, prediction_at_list
def do_test(test_data, out_writer, csv_out_writer,
ndays, near_idxs, encoded_data, fold_count="final",
method="mean", dist_nn=None, wt_dist_flag=False):
for i in range(0, len(test_data)):
user_id = int(test_data.iloc[i]["user_id"])
print("User- Id ", user_id)
y_labels, prediction_at_list = get_pred_ref_points(user_id, ndays, method=method)
# y_labels = create_y_labels(X_test, preds, method="mean")
# Weighting by inverse of neighbor
if wt_dist_flag:
test_user_nn = near_idxs[i]
test_user_dist = dist_nn[i]
pred_weighted_average = compute_weighted_avg(test_user_nn, encoded_data, prediction_at_list,
method=method, dist_nn=test_user_dist, wt_flag=wt_dist_flag)
pred_lr = compute(test_user_nn, encoded_data, prediction_at_list,
method=method, dist_nn=test_user_dist, wt_dist=wt_dist_flag)
else:
test_user_nn = near_idxs[i]
pred_weighted_average = compute_weighted_avg(test_user_nn, encoded_data, prediction_at_list,
method=method, dist_nn=None, wt_flag=False)
pred_lr = compute(test_user_nn, encoded_data, prediction_at_list,
method=method, dist_nn=None, wt_dist=False)
# calculate
if not fold_count == "final":
print("Evaluating for the fold-" + str(fold_count) + " for the forecast reference points - " +
str(prediction_at_list))
out_writer.write("Evaluating for the forecast reference points -- " +
str(prediction_at_list) + "for the method evaluation -- " + str(method) + "\n")
else:
print("Evaluating for forecast reference points - " +
str(prediction_at_list))
out_writer.write("Evaluating over the forecast reference points -- " +
str(prediction_at_list) + "for the method evaluation -- " + str(method) + "\n")
print("Computing RMSE for weighted average based predictions on the User -- " + str(user_id))
print("---------------------------------------------------------------")
out_writer.write("---------------------------------------------------------------\n")
print("RMSE -- ", np.sqrt(mean_squared_error(y_labels, pred_weighted_average)))
out_writer.write("RMSE -- " + str(np.sqrt(mean_squared_error(y_labels, pred_weighted_average))) + "\n")
# Writing to csv file
if not fold_count == "final":
csv_out_writer.write("".join(str(user_id) + "," +
str(np.sqrt(mean_squared_error(y_labels, pred_weighted_average))) + "," +
"weighted_average" + ","
+ str(y_labels[0]) + "," + str(y_labels[1]) + "," + str(y_labels[2])
+ "," + str(pred_weighted_average[0]) + "," + str(pred_weighted_average[1])
+ "," + str(pred_weighted_average[2]) + "\n"))
else:
csv_out_writer.write("".join(str(user_id) + "," +
str(np.sqrt(mean_squared_error(y_labels, pred_weighted_average))) + "," +
"weighted_average" + ","
+ str(y_labels[0]) + "," + str(y_labels[1]) + "," + str(y_labels[2])
+ "," + str(pred_weighted_average[0]) + "," + str(pred_weighted_average[1])
+ "," + str(pred_weighted_average[2]) + "\n"))
print("-----------------------------------------------------------------------------")
out_writer.write("---------------------------------------------------------------\n")
print("Computing RMSE for {} {} based predictions for the user -- {}"
.format(str("weighted_distance" + str(wt_dist_flag)), str("linear_regression"), str(user_id)))
out_writer.write("Computing RMSE for {} {} based predictions for the user -- {} \n"
.format(str("weighted_distance" + str(wt_dist_flag)), str("linear_regression"), str(user_id)))
print("RMSE -- ", np.sqrt(mean_squared_error(y_labels, pred_lr)))
out_writer.write("RMSE -- " + str(np.sqrt(mean_squared_error(y_labels, pred_lr))) + "\n")
print("---------------------------------------------------------------")
out_writer.write("---------------------------------------------------------------\n")
# Write to csv file
if not fold_count == "final":
csv_out_writer.write("".join(str(user_id) + "," +
str(np.sqrt(mean_squared_error(y_labels, pred_lr))) + "," +
str("lr") + ","
+ str(y_labels[0]) + "," + str(y_labels[1]) + "," + str(y_labels[2])
+ "," + str(pred_lr[0]) + "," + str(pred_lr[1]) + "," + str(
pred_lr[2]) + "\n"))
else:
csv_out_writer.write("".join(str(user_id) + "," +
str(np.sqrt(mean_squared_error(y_labels, pred_lr))) + "," +
str("lr") + ","
+ str(y_labels[0]) + "," + str(y_labels[1]) + "," + str(y_labels[2])
+ "," + str(pred_lr[0]) + "," + str(pred_lr[1]) + "," + str(
pred_lr[2]) + "\n"))
# Change method and execute to get the predictions appropriately, these are configurations
eval_method = "mean"
# Default day readings for all test users must be at mean and prediction are between min - mean - max
tsg_data = TimeSeriesGroupProcessing(method=eval_method)
# For all combinations evaluation it must be set to True
quest_cmb_all = False
# Same random state needs to be maintained to get consistent test data over all combinations and repeatable results
random_state = 1220
# It is the setting to get the ahead prediction for tinnitus distress, 3 here means for 3 days
# min is a day and max is about 60 days between points, which is not a usual scenario
ndays = 3
# Build the default NN with all the combination.
if not quest_cmb_all:
for key, val in properties.quest_comb.items():
# Build NN for each category
print("Building NN for the question combination -- " + str(key))
cat_idx, num_idx, combined_df = smf.initial_processing(key, val, append_synthethic=False)
# Build and get the knn NN for prediction over test instances.
# Save the data objs
encoded_data = save_data_objs(combined_df, key)
out_writer = open("".join("output/output_" + str(key) + "_" + str(eval_method) + "_heom_norm.txt"), "w+")
csv_out_writer = open("".join("output/output_" + str(key) + "_" + str(eval_method) + "_heom_norm.csv"), "w+")
csv_out_writer.write("".join("user_id,rmse,algorithm,"
"ref_p1,ref_p2,ref_p3,pred_p1,pred_p2,pred_p3\n"))
#Create a test set
X, test = train_test_split(encoded_data,
test_size=0.20,
random_state=random_state)
def filter_train_ids(x):
# print(x)
if x["user_id"] in train_user_ids:
return x
def filter_test_ids(x):
# print(x)
if x["user_id"] in test_user_ids:
return x
train_user_ids = X["user_id"].to_list()
X_train_data_ui = combined_df.apply(filter_train_ids, axis=1, result_type="broadcast").dropna()
X_train_data_ui["user_id"] = X_train_data_ui["user_id"].apply(int)
# Save the non encoded train data for visualization purposes
utility.save_model("".join(key + "/" + key + "_train_stat_q_data"), X_train_data_ui)
# filter and get the data to show to the UI for the test data.
test_user_ids = test["user_id"].to_list()
X_test_data_ui = combined_df.apply(filter_test_ids, axis=1, result_type="broadcast").dropna()
X_test_data_ui["user_id"] = X_test_data_ui["user_id"].apply(int)
# Save the data_ui object as json
#test_data = {}
#test_data["users"] = X_test_data_ui.to_dict("r")
#utility.save_data("".join("test_data_ui_" + key), test_data)
from HEOM import HEOM
# Can be done at prediction too.
from sklearn.metrics.pairwise import cosine_distances
from sklearn.linear_model import LinearRegression
from scipy.spatial.distance import pdist, squareform
from scipy.stats import zscore
heom = HEOM(X.to_numpy(), cat_idx, num_idx)
sim_matrix = pdist(X.to_numpy()[:, 1:], heom.heom_distance)
mean_heom_distance = sim_matrix.mean()
knn = NearestNeighbors(n_neighbors=5, metric=heom.heom_distance, radius=mean_heom_distance)
knn.fit(X.iloc[:, 1:])
dist, test_idx = knn.kneighbors(test.to_numpy()[:, 1:], n_neighbors=5)
# Execute without any varying for saving the KNN as pickle to be used by UI
do_test(test, out_writer, csv_out_writer, ndays, test_idx, X,
fold_count="final", method=eval_method, dist_nn=None, wt_dist_flag=False)
utility.save_model("".join(key + "/" + "knn_static"), knn)
utility.save_model("".join(key + "/" + "train_sim_data.pckl"), X)
out_writer.close()
csv_out_writer.close()
# All feature combinations
cat_idx, num_idx, combined_df = initial_processing()
# Build KNN for each category
print("Building KNN for the question combination -- " + str("overall"))
# Save the data objs
encoded_data = save_data_objs(combined_df, "overall")
X, test = train_test_split(encoded_data,
test_size=0.20,
random_state=random_state)
def filter_train_ids(x):
# print(x)
if x["user_id"] in train_user_ids:
return x
def filter_test_ids(x):
# print(x)
if x["user_id"] in test_user_ids:
return x
train_user_ids = X["user_id"].to_list()
X_train_data_ui = combined_df.apply(filter_train_ids, axis=1, result_type="broadcast").dropna()
X_train_data_ui["user_id"] = X_train_data_ui["user_id"].apply(int)
# Save in overall.
utility.save_model("".join("overall" + "/" + "overall" + "_train_stat_q_data"), X_train_data_ui)
# filter and get the data to show to the UI for the test data.
test_user_ids = test["user_id"].to_list()
X_test_data_ui = combined_df.apply(filter_test_ids, axis=1, result_type="broadcast").dropna()
X_test_data_ui["user_id"] = X_test_data_ui["user_id"].apply(int)
# Save the data_ui object as json
test_data = {}
test_data["users"] = X_test_data_ui.to_dict("r")
utility.save_data("test_data_ui_x_test", test_data)
# Save the results to out_writer
out_writer = open("output/overall_output_folds_" + str(eval_method) + ".txt", "w+")
csv_out_writer = open("output/overall_output_folds_" + str(eval_method) + ".csv", "w+")
# First get the time series for a given test patient and the reference point and iterate to evaluate
csv_out_writer.write("user_id,rmse,algorithm,"
"ref_p1,ref_p2,ref_p3,pred_p1,pred_p2,pred_p3\n")
# Split the data into train and test
from sklearn.model_selection import train_test_split
import utility
from HEOM import HEOM
#Can be done at prediction too.
from sklearn.metrics.pairwise import cosine_distances
from sklearn.linear_model import LinearRegression
from scipy.spatial.distance import pdist, squareform
from scipy.stats import zscore
heom = HEOM(X.to_numpy()[:, 1:], cat_idx, num_idx)
sim_matrix = pdist(X.to_numpy()[:, 1:], heom.heom_distance)
mean_heom_distance = sim_matrix.mean()
knn = NearestNeighbors(n_neighbors=5, metric=heom.heom_distance, radius=mean_heom_distance)
knn.fit(X.to_numpy()[:, 1:])
dist, idx_test = knn.kneighbors(test.to_numpy()[:, 1:], n_neighbors=5)
# First get the time series for a given test patient and the reference point and iterate to evaluate
do_test(test, out_writer, csv_out_writer, ndays, idx_test, X,
fold_count="final", method=eval_method, dist_nn=None, wt_dist_flag=False)
out_writer.close()
csv_out_writer.close()
# End save the nearest neighbor as data objects, so that can be used from the UI
utility.save_model("".join("overall/" + "knn_static"), knn)
utility.save_model("".join("overall" + "/" + "train_sim_data.pckl"), X)
'''
ML Modelling based on s02 - loudness.
Note: This has to be run once the all-feature execution is completed. Since we build upon a custom similarity matrix,
it is essential that the same train/test split happens so that it can be verified from the application.
'''
# Create train and test containing the same users as in the static data split. (Note: Run the above code and then this
# because the same set of train/test users is used)
def splitData(dataset, test_user_ids):
train_data = dataset[~dataset["user_id"].isin(test_user_ids)]
test_data = dataset[dataset["user_id"].isin(test_user_ids)]
return train_data, test_data
# Save both train and test matrix
def save_ts_objs(train, test, location_name):
try:
if not os.path.isdir(properties.model_location + location_name):
os.makedirs(properties.model_location + location_name)
utility.save_model("".join(location_name + "/" + location_name + "_train_data"), train)
utility.save_model("".join(location_name + "/" + location_name + "_test_data"), test)
except Exception:
print(traceback.print_exc())
X = ml_ts.process_data(grouping="day")
# Calculate pairwise distance and create a dataframe for the same
from scipy.spatial.distance import pdist, squareform
# Cross validate here based on the same split of static data here.
# Note: Only one combination will be present
C = np.zeros((X.shape[0], X.shape[0]))
for i in range(0, len(X)):
for j in range(0, len(X)):
dist = ml_ts.compute_dist(X[:, 1][i], X[:, 1][j])
C[i][j] = dist
C_df = pd.DataFrame(C)
#C_df.to_csv("sim_ema.csv")
# Threshold overall distance for making within radius
threshold_distance = sum(C_df.mean())/len(C_df)
user_ids = []
for val in X:
user_ids.append(val[0])
C_df["user_id"] = user_ids
train_data, test_data = splitData(C_df, test_user_ids)
# Save the time series data objects as dynamic_ts into model folder
save_ts_objs(train_data, test_data, "dynamic_ts")
out_writer = open("".join("output/output_ema_" + str(eval_method) + "_.txt"), "w+")
csv_out_writer = open("".join("output/output_ema_" + str(eval_method) + "_.csv"), "w+")
csv_out_writer.write("user_id,rmse,algorithm,"
"ref_p1,ref_p2,ref_p3,pred_p1,pred_p2,pred_p3\n")
# Test on the final test set. Note there is no varying K just to save the NN here.
# It should be noted we use NearestNeighbors and not a KNN classifier.
knn_ema = NearestNeighbors(n_neighbors=5, metric="precomputed", radius=threshold_distance)
knn_ema.fit(train_data[train_data.index])
ema_dist, ema_idx = knn_ema.kneighbors(test_data[train_data.index], n_neighbors=5)
# First get the time series for a given test patient and the reference point and iterate to evaluate
do_test(test_data, out_writer, csv_out_writer, ndays, ema_idx, encoded_data,
fold_count="final", method=eval_method, dist_nn=None, wt_dist_flag=False)
# Close the writers
out_writer.close()
csv_out_writer.close()
# Save the similarity search index KNN
utility.save_model("".join("dynamic_ts" + "/" + "dynamic_ts" + "_knn"), knn_ema)
| [
[
[
7,
14
],
[
20810,
20817
],
[
22314,
22321
],
[
22381,
22388
],
[
23345,
23352
],
[
23808,
23815
]
],
[
[
22,
49
],
[
19458,
19461
],
[
815,
818
],
[
2434,
2437
],
[
3515,
3518
]
],
[
[
57,
68
],
[
26889,
26891
],
[
4233,
4235
],
[
5612,
5614
],
[
5714,
5716
],
[
5721,
5723
],
[
5756,
5758
],
[
6351,
6353
],
[
6358,
6360
],
[
6421,
6423
],
[
7514,
7516
],
[
8273,
8275
],
[
8473,
8475
],
[
8527,
8529
],
[
8539,
8541
],
[
8579,
8581
],
[
9160,
9162
],
[
9167,
9169
],
[
9230,
9232
],
[
9359,
9361
],
[
10404,
10406
],
[
10579,
10581
],
[
10802,
10804
],
[
10868,
10870
],
[
10948,
10950
],
[
11028,
11030
],
[
11108,
11110
],
[
15169,
15171
],
[
15273,
15275
],
[
15521,
15523
],
[
16095,
16097
],
[
17162,
17164
],
[
17252,
17254
],
[
17657,
17659
],
[
18140,
18142
]
],
[
[
105,
121
],
[
20104,
20120
],
[
22781,
22797
]
],
[
[
150,
151
],
[
15177,
15195
],
[
15281,
15299
],
[
15529,
15547
],
[
16103,
16121
],
[
17170,
17188
],
[
17260,
17278
],
[
17665,
17683
],
[
18148,
18166
]
],
[
[
180,
205
],
[
18730,
18755
]
],
[
[
234,
249
]
],
[
[
280,
296
],
[
21864,
21880
],
[
24794,
24810
],
[
27951,
27967
]
],
[
[
333,
338
]
],
[
[
347,
371
],
[
26618,
26623
],
[
26997,
27002
]
],
[
[
479,
496
]
],
[
[
873,
888
]
],
[
[
988,
998
],
[
19276,
19286
],
[
1139,
1149
],
[
3291,
3301
],
[
3356,
3366
],
[
26251,
26261
],
[
26319,
26329
]
],
[
[
1006,
1018
],
[
27071,
27073
],
[
1124,
1126
],
[
1740,
1742
],
[
6710,
6712
],
[
9687,
9689
]
],
[
[
1023,
1041
],
[
22568,
22586
]
],
[
[
3191,
3193
],
[
3277,
3279
],
[
3344,
3346
],
[
26237,
26239
],
[
26307,
26309
]
],
[
[
3201,
3210
],
[
4157,
4166
],
[
26589,
26598
]
],
[
[
3215,
3229
],
[
19640,
19654
],
[
22730,
22744
]
],
[
[
4186,
4202
],
[
5393,
5409
]
],
[
[
4393,
4413
],
[
13383,
13403
],
[
13826,
13846
]
],
[
[
5449,
5477
],
[
5234,
5262
],
[
7872,
7900
]
],
[
[
5802,
5809
],
[
13589,
13596
],
[
14018,
14025
]
],
[
[
8666,
8691
]
],
[
[
11264,
11279
]
],
[
[
11822,
11841
],
[
13080,
13099
]
],
[
[
12741,
12748
],
[
22153,
22160
],
[
25083,
25090
],
[
28258,
28265
]
],
[
[
18595,
18606
],
[
18763,
18774
],
[
19748,
19759
],
[
19866,
19877
],
[
22258,
22269
],
[
23949,
23960
],
[
24037,
24048
],
[
25180,
25191
],
[
27535,
27546
],
[
27623,
27634
],
[
28370,
28381
]
],
[
[
18719,
18727
],
[
4716,
4724
],
[
6191,
6199
],
[
9096,
9104
],
[
11405,
11413
],
[
11960,
11968
]
],
[
[
18833,
18846
],
[
19241,
19254
]
],
[
[
18971,
18983
],
[
20234,
20246
],
[
22911,
22923
]
],
[
[
19174,
19179
],
[
22195,
22200
],
[
25125,
25130
],
[
28305,
28310
]
],
[
[
19264,
19267
],
[
19411,
19414
],
[
19481,
19484
],
[
19668,
19671
],
[
19731,
19734
],
[
19849,
19852
],
[
20837,
20840
],
[
20849,
20852
],
[
22341,
22344
],
[
22408,
22411
]
],
[
[
19269,
19272
],
[
19486,
19489
]
],
[
[
19426,
19433
],
[
21716,
21723
]
],
[
[
19435,
19442
],
[
21725,
21732
]
],
[
[
19444,
19455
],
[
19655,
19666
],
[
20580,
20591
],
[
21043,
21054
]
],
[
[
19625,
19637
],
[
20121,
20133
]
],
[
[
19682,
19692
],
[
22167,
22177
],
[
22456,
22466
]
],
[
[
19796,
19810
],
[
19915,
19929
],
[
22179,
22193
],
[
22483,
22497
]
],
[
[
20094,
20095
],
[
20530,
20531
],
[
21702,
21703
],
[
21761,
21762
],
[
21966,
21967
],
[
22212,
22213
],
[
22444,
22445
]
],
[
[
20097,
20101
],
[
20991,
20995
],
[
22021,
22025
],
[
22161,
22165
]
],
[
[
20261,
20277
],
[
20598,
20614
]
],
[
[
20390,
20405
],
[
21061,
21076
]
],
[
[
20513,
20527
],
[
20336,
20350
]
],
[
[
20562,
20577
],
[
20695,
20710
],
[
20666,
20681
],
[
20878,
20893
]
],
[
[
20975,
20988
],
[
20464,
20477
]
],
[
[
21026,
21040
],
[
21157,
21171
],
[
21129,
21143
]
],
[
[
21415,
21419
],
[
21697,
21701
]
],
[
[
21506,
21522
]
],
[
[
21564,
21580
]
],
[
[
21624,
21629
],
[
21755,
21760
]
],
[
[
21631,
21641
]
],
[
[
21674,
21680
]
],
[
[
21690,
21694
],
[
21782,
21786
],
[
21903,
21907
]
],
[
[
21742,
21752
],
[
21831,
21841
]
],
[
[
21810,
21828
],
[
21930,
21948
]
],
[
[
21858,
21861
],
[
21958,
21961
],
[
22006,
22009
],
[
22368,
22371
]
],
[
[
21989,
21993
]
],
[
[
21995,
22003
],
[
22202,
22210
]
],
[
[
20261,
20277
],
[
20598,
20614
]
],
[
[
20390,
20405
],
[
21061,
21076
]
],
[
[
21415,
21419
],
[
21697,
21701
]
],
[
[
21506,
21522
]
],
[
[
21564,
21580
]
],
[
[
21624,
21629
],
[
21755,
21760
]
],
[
[
21631,
21641
]
],
[
[
21674,
21680
]
],
[
[
22536,
22543
],
[
24670,
24677
]
],
[
[
22545,
22552
],
[
24679,
24686
]
],
[
[
22554,
22565
],
[
22745,
22756
],
[
23180,
23191
],
[
23566,
23577
]
],
[
[
22715,
22727
],
[
22798,
22810
],
[
28321,
28333
]
],
[
[
22771,
22772
],
[
23138,
23139
],
[
24649,
24650
],
[
24707,
24708
],
[
24888,
24889
],
[
25142,
25143
],
[
25481,
25482
]
],
[
[
22774,
22778
],
[
23522,
23526
],
[
24941,
24945
],
[
25091,
25095
]
],
[
[
22931,
22947
],
[
23198,
23214
]
],
[
[
23029,
23044
],
[
23584,
23599
]
],
[
[
23121,
23135
],
[
22990,
23004
]
],
[
[
23162,
23177
],
[
23287,
23302
],
[
23258,
23273
],
[
23425,
23440
]
],
[
[
23506,
23519
],
[
27348,
27361
],
[
23087,
23100
]
],
[
[
23549,
23563
],
[
23672,
23686
],
[
23644,
23658
],
[
23780,
23794
]
],
[
[
23744,
23753
],
[
23759,
23768
],
[
23849,
23858
]
],
[
[
23894,
23904
],
[
25097,
25107
],
[
25228,
25238
]
],
[
[
23978,
23992
],
[
24168,
24182
],
[
25109,
25123
],
[
25247,
25261
]
],
[
[
24361,
24377
]
],
[
[
24385,
24392
],
[
25352,
25359
],
[
25412,
25419
],
[
28520,
28527
],
[
3404,
3411
],
[
3880,
3887
],
[
26370,
26377
],
[
26466,
26473
]
],
[
[
24410,
24414
],
[
24644,
24648
]
],
[
[
24484,
24500
]
],
[
[
24534,
24550
]
],
[
[
24586,
24591
],
[
24701,
24706
]
],
[
[
24593,
24603
]
],
[
[
24628,
24634
]
],
[
[
24637,
24641
],
[
24728,
24732
],
[
24833,
24837
]
],
[
[
24688,
24698
],
[
24769,
24779
]
],
[
[
24748,
24766
],
[
24860,
24878
]
],
[
[
24788,
24791
],
[
24880,
24883
],
[
24926,
24929
],
[
25407,
25410
]
],
[
[
24909,
24913
]
],
[
[
24915,
24923
],
[
25132,
25140
]
],
[
[
25933,
25942
],
[
27332,
27341
]
],
[
[
26171,
26183
],
[
27431,
27443
]
],
[
[
26614,
26615
],
[
26899,
26900
],
[
26911,
26912
],
[
26946,
26947
],
[
26977,
26978
],
[
27016,
27017
],
[
27028,
27029
],
[
27247,
27248
]
],
[
[
26755,
26760
]
],
[
[
26762,
26772
]
],
[
[
26885,
26886
],
[
27048,
27049
],
[
27084,
27085
]
],
[
[
26928,
26929
],
[
27024,
27025
],
[
27050,
27051
]
],
[
[
26959,
26960
],
[
27036,
27037
],
[
27053,
27054
]
],
[
[
26990,
26994
],
[
27058,
27062
]
],
[
[
27064,
27068
],
[
27197,
27201
],
[
27214,
27218
],
[
27279,
27283
],
[
27342,
27346
]
],
[
[
27172,
27190
],
[
28012,
28030
]
],
[
[
27222,
27230
],
[
27254,
27262
],
[
27297,
27305
]
],
[
[
27240,
27243
],
[
27270,
27273
]
],
[
[
27308,
27318
],
[
27444,
27454
],
[
28044,
28054
],
[
28055,
28065
],
[
28123,
28133
]
],
[
[
27320,
27329
],
[
27456,
27465
],
[
28113,
28122
],
[
28266,
28275
]
],
[
[
27482,
27492
],
[
28277,
28287
],
[
28438,
28448
]
],
[
[
27566,
27580
],
[
27655,
27669
],
[
28289,
28303
],
[
28457,
28471
]
],
[
[
27941,
27948
],
[
28032,
28039
],
[
28094,
28101
],
[
28592,
28599
]
],
[
[
28074,
28082
]
],
[
[
28084,
28091
],
[
28312,
28319
]
]
] |
#!/usr/bin/env python2
"""
Copyright (c) 2016, Bliksem Labs B.V.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import sys
import numpy
from scipy.interpolate import griddata
import matplotlib.pyplot as plt
try:
from lxml import etree
except ImportError:
try:
# Python 2.5
import xml.etree.cElementTree as etree
except ImportError:
try:
# Python 2.5
import xml.etree.ElementTree as etree
except ImportError:
try:
# normal cElementTree install
import cElementTree as etree
except ImportError:
try:
# normal ElementTree install
import elementtree.ElementTree as etree
except ImportError:
print("Failed to import ElementTree from any known place")
# Process the Kismet GPSXML into columns.
def parse_xml(filename):
tree = etree.parse(open(filename, 'rb'))
ts = []
bssid = []
signal = []
lat = []
lon = []
walked_lon = []
walked_lat = []
for z in tree.findall('.//gps-point'):
# A lon/lat filter might be applied here
# if float(z.get('lon')) < 3.942:
# continue
if z.get('bssid') == 'GP:SD:TR:AC:KL:OG':
walked_lon.append(float(z.get('lon')))
walked_lat.append(float(z.get('lat')))
elif z.get('signal_dbm') is not None:
bssid.append(z.get('bssid'))
ts.append(int(z.get('time-sec')))
lat.append(float(z.get('lat')))
lon.append(float(z.get('lon')))
signal.append(int(z.get('signal_dbm')))
return (ts, bssid, signal, lat, lon, walked_lon, walked_lat,)
# Draw parsed data on a surface
def draw_data(ts, bssid, signal, lat, lon, walked_lon, walked_lat):
# We create a grid of 1000x1000
grid_x, grid_y = numpy.mgrid[min(walked_lon):max(walked_lon):1000j, min(walked_lat):max(walked_lat):1000j]
# We want to draw all unique APs
bssids = list(set(bssid))
# For each BSSID...
for s in bssids:
points_lon = []
points_lat = []
values = []
h = []
# Apply all points on an intermediate surface
        # so we can distinguish points where we were, without reception
for i in range(0, len(bssid)):
if bssid[i] == s:
hc = hash((lon[i], lat[i]))
if hc not in h:
points_lon.append(lon[i])
points_lat.append(lat[i])
values.append(float(signal[i]))
h.append(hash((lon[i], lat[i])))
# Optional: apply -100dBm where we don't have gathered data
for i in range(0, len(walked_lon)):
hc = hash((walked_lon[i], walked_lat[i]))
if hc not in h:
points_lon.append(lon[i])
points_lat.append(lat[i])
values.append(float(-100))
h.append(hash((walked_lon[i], walked_lat[i])))
# Interpolate the data
grid = griddata((points_lon, points_lat), numpy.array(values), (grid_x, grid_y), method='cubic')
# Store the bitmap in the current folder.
plt.show()
plt.imsave('%s.png' % (s), grid.T)
# Calculate the World File for use in Qgis
a = ((max(walked_lon)-min(walked_lon))/1000)
b = 0
c = 0
d = ((max(walked_lat)-min(walked_lat))/1000)
e = min(walked_lon)
f = min(walked_lat)
# Write the World File
open('%s.pngw' % (s), 'w').write('%.16f\n%d\n%d\n%.16f\n%.16f\n%.16f' % (a, b, c, d, e, f,))
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage %s << /path/to/Kismet.gpsxml >>" % (sys.argv[0]))
sys.exit(-1)
draw_data(*parse_xml(sys.argv[1]))
| [
[
[
1357,
1360
],
[
4390,
4393
],
[
4457,
4460
],
[
4473,
4476
],
[
4510,
4513
]
],
[
[
1368,
1373
],
[
2898,
2903
],
[
3883,
3888
]
],
[
[
1404,
1412
],
[
3848,
3856
]
],
[
[
1420,
1444
],
[
3985,
3988
],
[
3998,
4001
]
],
[
[
1471,
1476
],
[
2071,
2076
]
],
[
[
1532,
1563
],
[
2071,
2076
]
],
[
[
1627,
1657
],
[
2071,
2076
]
],
[
[
1746,
1767
],
[
2071,
2076
]
],
[
[
1863,
1895
],
[
2071,
2076
]
],
[
[
2042,
2051
],
[
4500,
4509
]
],
[
[
2782,
2791
],
[
4489,
4498
]
]
] |
# coding: utf-8
"""
ThinVolumeReinitializeDescriptor.py
The Clear BSD License
Copyright (c) – 2016, NetApp, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of NetApp, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from pprint import pformat
from six import iteritems
class ThinVolumeReinitializeDescriptor(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
ThinVolumeReinitializeDescriptor - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'thin_vol': 'str', # (required parameter)
'init_params': 'ThinVolumeReinitializeParams'
}
self.attribute_map = {
'thin_vol': 'thinVol', # (required parameter)
'init_params': 'initParams'
}
self._thin_vol = None
self._init_params = None
@property
def thin_vol(self):
"""
Gets the thin_vol of this ThinVolumeReinitializeDescriptor.
The thin volume to re-initialize.
:return: The thin_vol of this ThinVolumeReinitializeDescriptor.
:rtype: str
:required/optional: required
"""
return self._thin_vol
@thin_vol.setter
def thin_vol(self, thin_vol):
"""
Sets the thin_vol of this ThinVolumeReinitializeDescriptor.
The thin volume to re-initialize.
:param thin_vol: The thin_vol of this ThinVolumeReinitializeDescriptor.
:type: str
"""
self._thin_vol = thin_vol
@property
def init_params(self):
"""
Gets the init_params of this ThinVolumeReinitializeDescriptor.
Operation-specific parameters for the re-initialize.
:return: The init_params of this ThinVolumeReinitializeDescriptor.
:rtype: ThinVolumeReinitializeParams
:required/optional: required
"""
return self._init_params
@init_params.setter
def init_params(self, init_params):
"""
Sets the init_params of this ThinVolumeReinitializeDescriptor.
Operation-specific parameters for the re-initialize.
:param init_params: The init_params of this ThinVolumeReinitializeDescriptor.
:type: ThinVolumeReinitializeParams
"""
self._init_params = init_params
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
if self is None:
return None
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if self is None or other is None:
return None
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| [
[
[
1727,
1734
],
[
5076,
5083
]
],
[
[
1751,
1760
],
[
4258,
4267
]
],
[
[
1769,
1801
]
]
] |
"""Option helper functions"""
__docformat__ = "numpy"
import argparse
from typing import List
import pandas as pd
import numpy as np
from gamestonk_terminal.helper_funcs import (
parse_known_args_and_warn,
check_non_negative,
)
# pylint: disable=R1710
def load(other_args: List[str]) -> str:
"""Load ticker into object
Parameters
----------
other_args: List[str]
        Argparse arguments
Returns
-------
str:
Ticker
"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="opload",
description="Load a ticker into option menu",
)
parser.add_argument(
"-t",
"--ticker",
action="store",
dest="ticker",
required="-h" not in other_args,
help="Stock ticker",
)
try:
if other_args:
if "-t" not in other_args and "-h" not in other_args:
other_args.insert(0, "-t")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return ""
print("")
return ns_parser.ticker
except Exception as e:
print(e, "\n")
return ""
except SystemExit:
print("")
return ""
# pylint: disable=no-else-return
def select_option_date(avalaiable_dates: List[str], other_args: List[str]) -> str:
"""Select an option date out of a supplied list
Parameters
----------
avalaiable_dates: List[str]
Possible date options
other_args: List[str]
        Argparse arguments
Returns
-------
expiry_date: str
Selected expiry date
"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="exp",
description="See and set expiration date",
)
parser.add_argument(
"-d",
"--date",
dest="n_date",
action="store",
type=int,
default=-1,
choices=range(len(avalaiable_dates)),
help="Select index for expiry date.",
)
parser.add_argument(
"-D",
dest="date",
type=str,
choices=avalaiable_dates + [""],
help="Select date (YYYY-MM-DD)",
default="",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-d")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return ""
# Print possible expiry dates
if ns_parser.n_date == -1 and not ns_parser.date:
print("\nAvailable expiry dates:")
for i, d in enumerate(avalaiable_dates):
print(f" {(2 - len(str(i))) * ' '}{i}. {d}")
print("")
return ""
# It means an expiry date was correctly selected
else:
if ns_parser.date:
if ns_parser.date in avalaiable_dates:
print(f"Expiraration set to {ns_parser.date} \n")
return ns_parser.date
else:
print("Expiration not an option")
return ""
else:
expiry_date = avalaiable_dates[ns_parser.n_date]
print(f"Expiraration set to {expiry_date} \n")
return expiry_date
except Exception as e:
print(e, "\n")
return ""
def get_loss_at_strike(strike: float, chain: pd.DataFrame) -> float:
"""Function to get the loss at the given expiry
Parameters
----------
strike: Union[int,float]
Value to calculate total loss at
chain: Dataframe:
Dataframe containing at least strike and openInterest
Returns
-------
loss: Union[float,int]
Total loss
"""
itm_calls = chain[chain.index < strike][["OI_call"]]
itm_calls["loss"] = (strike - itm_calls.index) * itm_calls["OI_call"]
call_loss = itm_calls["loss"].sum()
itm_puts = chain[chain.index > strike][["OI_put"]]
itm_puts["loss"] = (itm_puts.index - strike) * itm_puts["OI_put"]
put_loss = itm_puts.loss.sum()
loss = call_loss + put_loss
return loss
def calculate_max_pain(chain: pd.DataFrame) -> int:
"""Returns the max pain for a given call/put dataframe
Parameters
----------
chain: DataFrame
Dataframe to calculate value from
Returns
-------
max_pain : int
Max pain value
"""
strikes = np.array(chain.index)
if ("OI_call" not in chain.columns) or ("OI_put" not in chain.columns):
print("Incorrect columns. Unable to parse max pain")
return np.nan
loss = []
for price_at_exp in strikes:
loss.append(get_loss_at_strike(price_at_exp, chain))
chain["loss"] = loss
max_pain = chain["loss"].idxmin()
return max_pain
def vol(other_args: List[str]):
"""Parse volume argparse
Parameters
----------
other_args: List[str]
Argparse arguments
Returns
-------
ns_parser: argparse.Namespace
Parsed namespace
"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="vol",
description="Plot volume. Volume refers to the number of contracts traded today.",
)
parser.add_argument(
"-m",
"--min",
default=-1,
type=check_non_negative,
help="Min strike to plot",
dest="min",
)
parser.add_argument(
"-M",
"--max",
default=-1,
type=check_non_negative,
help="Max strike to plot",
dest="max",
)
parser.add_argument(
"--calls",
action="store_true",
default=False,
dest="calls",
help="Flag to plot call options only",
)
parser.add_argument(
"--puts",
action="store_true",
default=False,
dest="puts",
help="Flag to plot put options only",
)
parser.add_argument(
"--source",
type=str,
default="tr",
choices=["tr", "yf"],
dest="source",
help="Source to get data from",
)
try:
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
return ns_parser
except Exception as e:
print(e, "\n")
def voi(other_args: List[str]):
"""Parse Volume + open interest argparse
Parameters
----------
other_args: List[str]
Argparse arguments
Returns
-------
ns_parser: argparse.Namespace
Parsed namespace
"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="voi",
description="""
Plots Volume + Open Interest of calls vs puts.
""",
)
parser.add_argument(
"-v",
"--minv",
dest="min_vol",
type=check_non_negative,
default=-1,
help="minimum volume (considering open interest) threshold of the plot.",
)
parser.add_argument(
"-m",
"--min",
dest="min_sp",
type=check_non_negative,
default=-1,
help="minimum strike price to consider in the plot.",
)
parser.add_argument(
"-M",
"--max",
dest="max_sp",
type=check_non_negative,
default=-1,
help="maximum strike price to consider in the plot.",
)
parser.add_argument(
"--source",
type=str,
default="tr",
choices=["tr", "yf"],
dest="source",
help="Source to get data from",
)
try:
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return None
return ns_parser
except Exception as e:
print(e, "\n")
return None
def oi(other_args: List[str]):
"""Parse Open Interest argparse
Parameters
----------
other_args: List[str]
Argparse arguments
Returns
-------
ns_parser: argparse.Namespace
Parsed namespace
"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="oi",
description="Plot open interest. Open interest represents the number of contracts that exist.",
)
parser.add_argument(
"-m",
"--min",
default=-1,
type=check_non_negative,
help="Min strike to plot",
dest="min",
)
parser.add_argument(
"-M",
"--max",
default=-1,
type=check_non_negative,
help="Max strike to plot",
dest="max",
)
parser.add_argument(
"--calls",
action="store_true",
default=False,
dest="calls",
help="Flag to plot call options only",
)
parser.add_argument(
"--puts",
action="store_true",
default=False,
dest="puts",
help="Flag to plot put options only",
)
parser.add_argument(
"--source",
type=str,
default="tr",
choices=["tr", "yf"],
dest="source",
help="Source to get data from",
)
try:
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return None
return ns_parser
except Exception as e:
print(e, "\n")
return None
| [
[
[
30,
43
]
],
[
[
62,
70
],
[
491,
499
],
[
564,
572
],
[
1716,
1724
],
[
1789,
1797
],
[
5191,
5199
],
[
5264,
5272
],
[
6755,
6763
],
[
6828,
6836
],
[
8284,
8292
],
[
8357,
8365
]
],
[
[
90,
94
],
[
287,
291
],
[
1382,
1386
],
[
1405,
1409
],
[
4960,
4964
],
[
6509,
6513
],
[
8046,
8050
]
],
[
[
103,
115
],
[
3543,
3545
],
[
4297,
4299
]
],
[
[
123,
134
],
[
4562,
4564
],
[
4737,
4739
]
],
[
[
186,
211
],
[
1033,
1058
],
[
2471,
2496
],
[
6319,
6344
],
[
7833,
7858
],
[
9424,
9449
]
],
[
[
217,
235
],
[
5512,
5530
],
[
5682,
5700
],
[
7092,
7110
],
[
7312,
7330
],
[
7512,
7530
],
[
8617,
8635
],
[
8787,
8805
]
],
[
[
270,
274
]
],
[
[
1345,
1363
]
],
[
[
3502,
3520
],
[
4812,
4830
]
],
[
[
4271,
4289
]
],
[
[
4944,
4947
]
],
[
[
6493,
6496
]
],
[
[
8031,
8033
]
]
] |
from . import _version
from .core import CondaEnv, CondaPackException, File, pack
__version__ = _version.get_versions()['version']
| [
[
[
14,
22
],
[
97,
105
]
],
[
[
41,
49
]
],
[
[
51,
69
]
],
[
[
71,
75
]
],
[
[
77,
81
]
],
[
[
83,
94
]
]
] |
from abc import ABCMeta, abstractmethod
from multiprocessing import Process, Value
import numpy as np
from flare.common.log import GameLogEntry
from flare.common.communicator import AgentCommunicator
from flare.common.replay_buffer import NoReplacementQueue, ReplayBuffer, Experience
class AgentHelper(object):
"""
AgentHelper abstracts some part of Agent's data processing and the I/O
communication between Agent and ComputationDataProcessor (CDP). It receives a
Communicator from one CDP and uses it to send data to the CDP.
"""
__metaclass__ = ABCMeta
def __init__(self, name, communicator, sample_interval):
assert isinstance(communicator, AgentCommunicator)
self.name = name
self.comm = communicator
self.counter = 0
assert sample_interval >= 2
self.sample_interval = sample_interval
def unpack_exps(self, exp_seqs):
"""
The input `exp_seqs` is always a list of sequences, each sequence
containing multiple Experience instances.
"""
def concat_lists(lists):
return [x for l in lists for x in l]
def extract_key(seq, k):
assert seq
return [e.val(k) for e in seq]
ret = dict(
inputs={},
next_inputs={},
next_alive={},
rewards={},
actions={},
next_actions={},
states=None,
next_states=None)
for k in self.input_keys:
ipt_seqs = [extract_key(exp_seq, k) for exp_seq in exp_seqs]
ret["inputs"][k] = [ipt_seq[:-1] for ipt_seq in ipt_seqs]
ret["next_inputs"][k] = [ipt_seq[1:] for ipt_seq in ipt_seqs]
for k in self.action_keys:
act_seqs = [extract_key(exp_seq, k) for exp_seq in exp_seqs]
ret["actions"][k] = [act_seq[:-1] for act_seq in act_seqs]
ret["next_actions"][k] = [act_seq[1:] for act_seq in act_seqs]
for k in self.reward_keys:
ret["rewards"][
k] = [extract_key(exp_seq[:-1], k) for exp_seq in exp_seqs]
if self.state_keys:
ret["states"] = dict()
ret["next_states"] = dict()
for k in self.state_keys:
## we only take the first(second) element of a seq for states(next_states)
ret["states"][
k] = [extract_key(exp_seq[:1], k)[0] for exp_seq in exp_seqs]
ret["next_states"][k] = [
extract_key(exp_seq[1:2], k)[0] for exp_seq in exp_seqs
]
ret["next_alive"]["alive"] \
= [extract_key(exp_seq[1:], "alive") for exp_seq in exp_seqs]
## HERE we decide whether the data are instances or seqs
## according to the existence of states
if not self.state_keys:
# sample instances
for k in ret.keys():
if ret[k] is not None:
for kk in ret[k].keys():
ret[k][kk] = concat_lists(ret[k][kk])
return ret, len(exp_seqs)
def predict(self, inputs, states=dict()):
"""
Process the input data (if necessary), send them to CDP for prediction,
and receive the outcome.
Args:
inputs(dict): data used for prediction. It is caller's job
to make sure inputs contains all data needed and they are in the
right form.
"""
data = dict(inputs=inputs, states=states)
self.comm.put_prediction_data(data, 1)
ret = self.comm.get_prediction_return()
return ret
@abstractmethod
def add_experience(self, e):
"""
Implements how to record an experience.
Will be called by self.store_data()
"""
pass
def _store_data(self, alive, data):
"""
Store the past experience for later use, e.g., experience replay.
Args:
data(dict): data to store.
"""
assert isinstance(data, dict)
data["alive"] = [alive]
t = Experience(data)
self.add_experience(t)
self.counter += 1
if self.counter % self.sample_interval == 0:
return self.learn()
@abstractmethod
def sample_experiences(self):
"""
Implements how to retrieve experiences from past.
Will be called by self.learn()
"""
pass
def learn(self):
"""
Sample data from past experiences and send them to CDP for learning.
Optionally, it receives learning outcomes sent back from CW and does
some processing.
Depends on users' need, this function can be called in three ways:
1. In Agent's run_one_episode
2. In store_data(), e.g., learning once every few steps
3. As a separate thread, e.g., using experience replay
"""
exp_seqs = self.sample_experiences()
if not exp_seqs:
return
data, size = self.unpack_exps(exp_seqs)
self.comm.put_training_data(data, size)
ret = self.comm.get_training_return()
return ret
class OnlineHelper(AgentHelper):
"""
Online helper. It calls `learn()` every `sample_interval`
steps.
While waiting for learning return, the calling `Agent` is blocked.
"""
def __init__(self, name, communicator, sample_interval=5):
super(OnlineHelper, self).__init__(name, communicator, sample_interval)
# NoReplacementQueue used to store past experience.
self.exp_queue = NoReplacementQueue()
@staticmethod
def exp_replay():
return False
def add_experience(self, e):
self.exp_queue.add(e)
def sample_experiences(self):
return self.exp_queue.sample()
class ExpReplayHelper(AgentHelper):
"""
Example of applying experience replay. It starts a separate threads to
run learn().
"""
def __init__(self,
name,
communicator,
buffer_capacity,
num_experiences,
sample_interval=5,
num_seqs=1):
super(ExpReplayHelper, self).__init__(name, communicator,
sample_interval)
# replay buffer for experience replay
self.replay_buffer = ReplayBuffer(buffer_capacity)
self.num_experiences = num_experiences
self.num_seqs = num_seqs
@staticmethod
def exp_replay():
return True
def add_experience(self, e):
self.replay_buffer.add(e)
def sample_experiences(self):
return self.replay_buffer.sample(self.num_experiences, self.num_seqs)
class Agent(Process):
"""
Agent implements the control flow and logics of how Robot interacts with
the environment and does computation. It is a subclass of Process. The entry
function of the Agent process is run().
Some members:
env: the environment
num_games: number of games to run
learning: Whether learn or not (only do testing)
helpers: a dictionary of `AgentHelper`, each corresponds to one
`ComputationTask`
log_q: communication channel between `Agent` and the centralized logger
running: the `Agent` will keep running as long as `running` is True.
"""
__metaclass__ = ABCMeta
def __init__(self, num_games, actrep, learning):
super(Agent, self).__init__()
self.id = -1 # just created, not added to the Robot yet
self.num_games = num_games
self.learning = learning
self.state_specs = None
self.helpers = {}
self.log_q = None
self.running = Value('i', 0)
self.daemon = True ## Process member
self.alive = 1
self.env_f = None
self.actrep = actrep
def set_env(self, env_class, *args, **kwargs):
"""
Set the environment for the agent. For now, only create a lambda
function. Once the agent process starts running, we will call this
function.
env_class: The environment class to create
args, kwargs: The arguments for creating the class
"""
self.env_f = lambda: env_class(*args, **kwargs)
def add_agent_helper(self, helper, input_keys, action_keys, state_keys,
reward_keys):
"""
Add an AgentHelper, with its name (also the name of its
        corresponding `ComputationTask`) as key.
"""
assert isinstance(helper, AgentHelper)
helper.input_keys = input_keys
helper.action_keys = action_keys
helper.state_keys = state_keys
helper.reward_keys = reward_keys
self.helpers[helper.name] = helper
def _make_zero_states(self, prop):
dtype = prop["dtype"] if "dtype" in prop else "float32"
return np.zeros(prop["shape"]).astype(dtype)
## The following three functions hide the `AgentHelper` from the users of
## `Agent`.
def predict(self, alg_name, inputs, states=dict()):
## Convert single instances to batches of size 1
## The reason for this conversion is that we want to reuse the
## _pack_data() and _unpack_data() of the CDP for handling both training
## and prediction data. These two functions assume that data are stored
## as mini batches instead of single instances in the prediction and learning
## queues.
inputs_ = {k: [v] for k, v in inputs.items()}
states_ = {k: [v] for k, v in states.items()}
prediction, next_states = self.helpers[alg_name].predict(inputs_,
states_)
## convert back to single instances
prediction = {k: v[0] for k, v in prediction.items()}
next_states = {k: v[0] for k, v in next_states.items()}
return prediction, next_states
def run(self):
"""
Default entry function of Agent process.
"""
assert self.env_f is not None, "You should first call self.set_env()!"
## Only call the env function now to make sure there is only one
## environment (OpenGL context) in each process
self.env = self.env_f()
self.running.value = 1
for i in range(self.num_games):
self._run_one_episode()
if not self.running.value:
return
self.running.value = 0
def _store_data(self, alg_name, data):
if self.learning: ## only store when the agent is learning
return self.helpers[alg_name]._store_data(self.alive, data)
def _run_one_episode(self):
def __store_data(observations, actions, states, rewards):
learning_ret = self._cts_store_data(observations, actions, states,
rewards) ## written by user
if learning_ret is not None:
for k, v in learning_ret.items():
self.log_entry.add_key(k, v)
observations = self._reset_env()
states = self._get_init_states() ## written by user
while self.alive and (not self.env.time_out()):
actions, next_states = self._cts_predict(
observations, states) ## written by user
assert isinstance(actions, dict)
assert isinstance(next_states, dict)
next_observations, rewards, next_game_over = self._step_env(
actions)
__store_data(observations, actions, states, rewards)
observations = next_observations
states = next_states
## next_game_over == 1: success
## next_game_over == -1: failure
self.alive = 1 - abs(next_game_over)
## self.alive: 0 -- success/failure
## 1 -- normal
## -1 -- timeout
if self.env.time_out():
self.alive = -1
actions, _ = self._cts_predict(observations, states)
zero_rewards = {k: [0] * len(v) for k, v in rewards.items()}
__store_data(observations, actions, states, zero_rewards)
            ## Record success. For games that do not have a definition of
## 'success' (e.g., 'breakout' never ends), this quantity will
## always be zero
self.log_entry.add_key("success", next_game_over > 0)
return self._total_reward()
def _reset_env(self):
self.alive = 1
## currently we only support a single logger for all CTs
self.log_entry = GameLogEntry(self.id, 'All')
obs = self.env.reset()
assert isinstance(obs, dict)
return obs
def _step_env(self, actions):
next_observations, rewards, next_game_over = self.env.step(actions,
self.actrep)
assert isinstance(next_observations, dict)
assert isinstance(rewards, dict)
self.log_entry.add_key("num_steps", 1)
self.log_entry.add_key("total_reward", sum(map(sum, rewards.values())))
return next_observations, rewards, next_game_over
def _total_reward(self):
self.log_q.put(self.log_entry)
return self.log_entry.total_reward
def _get_init_states(self):
"""
By default, there is no state. The user needs to override this function
to return a dictionary of init states if necessary.
"""
return dict()
@abstractmethod
def _cts_predict(self, observations, states):
"""
The user needs to override this function to specify how different CTs
make predictions given observations and states.
Output: actions: a dictionary of actions, each action being a vector
If the action is discrete, then it is a length-one
list of an integer.
states (optional): a dictionary of states, each state being a floating vector
"""
pass
@abstractmethod
def _cts_store_data(self, observations, actions, states, rewards):
"""
The user needs to override this function to specify how different CTs
store their corresponding experiences, by calling self._store_data().
Each input should be a dictionary.
"""
pass
| [
[
[
16,
23
],
[
573,
580
],
[
7359,
7366
]
],
[
[
25,
39
],
[
3614,
3628
],
[
4231,
4245
],
[
13481,
13495
],
[
14046,
14060
]
],
[
[
68,
75
],
[
6704,
6711
]
],
[
[
77,
82
],
[
7699,
7704
]
],
[
[
90,
101
],
[
8870,
8872
]
],
[
[
131,
143
],
[
12560,
12572
]
],
[
[
182,
199
],
[
683,
700
]
],
[
[
239,
257
],
[
5555,
5573
]
],
[
[
259,
271
],
[
6338,
6350
]
],
[
[
273,
283
],
[
4066,
4076
]
],
[
[
292,
303
],
[
5151,
5162
],
[
5800,
5811
],
[
8535,
8546
]
],
[
[
5138,
5150
],
[
5404,
5416
]
],
[
[
5784,
5799
],
[
6148,
6163
]
],
[
[
6698,
6703
],
[
7435,
7440
]
]
] |
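For the `unpack_exps()` method in the AgentHelper sample above, the per-key slicing is the core idea: each experience sequence contributes all but its last step as `inputs` and all but its first step as `next_inputs`. A tiny illustration of that slicing (the values are placeholders, not real observations):

# Placeholder values; mirrors the ipt_seq[:-1] / ipt_seq[1:] slicing above.
ipt_seq = ["obs0", "obs1", "obs2"]
inputs = ipt_seq[:-1]       # ["obs0", "obs1"] -> paired with actions/rewards
next_inputs = ipt_seq[1:]   # ["obs1", "obs2"] -> the successor observations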
import logging
from typing import List
from homeassistant.helpers.entity import Entity
from gehomesdk import ErdCode, ErdApplianceType
from .washer import WasherApi
from .dryer import DryerApi
from ..entities import GeErdSensor, GeErdBinarySensor
_LOGGER = logging.getLogger(__name__)
class WasherDryerApi(WasherApi, DryerApi):
"""API class for washer/dryer objects"""
APPLIANCE_TYPE = ErdApplianceType.COMBINATION_WASHER_DRYER
def get_all_entities(self) -> List[Entity]:
base_entities = self.get_base_entities()
common_entities = [
GeErdSensor(self, ErdCode.LAUNDRY_MACHINE_STATE),
GeErdSensor(self, ErdCode.LAUNDRY_CYCLE),
GeErdSensor(self, ErdCode.LAUNDRY_SUB_CYCLE),
GeErdBinarySensor(self, ErdCode.LAUNDRY_END_OF_CYCLE),
GeErdSensor(self, ErdCode.LAUNDRY_TIME_REMAINING),
GeErdSensor(self, ErdCode.LAUNDRY_DELAY_TIME_REMAINING),
GeErdBinarySensor(self, ErdCode.LAUNDRY_DOOR),
GeErdBinarySensor(self, ErdCode.LAUNDRY_REMOTE_STATUS),
]
washer_entities = self.get_washer_entities()
dryer_entities = self.get_dryer_entities()
entities = base_entities + common_entities + washer_entities + dryer_entities
return entities
| [
[
[
7,
14
],
[
260,
267
]
],
[
[
34,
38
],
[
475,
479
]
],
[
[
81,
87
],
[
480,
486
]
],
[
[
110,
117
],
[
605,
612
],
[
667,
674
],
[
721,
728
],
[
785,
792
],
[
846,
853
],
[
909,
916
],
[
984,
991
],
[
1043,
1050
]
],
[
[
119,
135
],
[
398,
414
]
],
[
[
157,
166
],
[
310,
319
]
],
[
[
186,
194
],
[
321,
329
]
],
[
[
218,
229
],
[
587,
598
],
[
649,
660
],
[
703,
714
],
[
828,
839
],
[
891,
902
]
],
[
[
231,
248
],
[
761,
778
],
[
960,
977
],
[
1019,
1036
]
],
[
[
250,
257
]
],
[
[
295,
309
]
]
] |
"""Decoding module."""
import numpy as np
import warnings
from . import utils
from numba import njit, int64, types, float64
def decode(H, y, snr, maxiter=1000):
"""Decode a Gaussian noise corrupted n bits message using BP algorithm.
Decoding is performed in parallel if multiple codewords are passed in y.
Parameters
----------
H: array (n_equations, n_code). Decoding matrix H.
y: array (n_code, n_messages) or (n_code,). Received message(s) in the
codeword space.
maxiter: int. Maximum number of iterations of the BP algorithm.
Returns
-------
x: array (n_code,) or (n_code, n_messages) the solutions in the
codeword space.
"""
m, n = H.shape
bits_hist, bits_values, nodes_hist, nodes_values = utils._bitsandnodes(H)
_n_bits = np.unique(H.sum(0))
_n_nodes = np.unique(H.sum(1))
if _n_bits * _n_nodes == 1:
solver = _logbp_numba_regular
bits_values = bits_values.reshape(n, -1)
nodes_values = nodes_values.reshape(m, -1)
else:
solver = _logbp_numba
var = 10 ** (-snr / 10)
if y.ndim == 1:
y = y[:, None]
# step 0: initialization
Lc = 2 * y / var
_, n_messages = y.shape
Lq = np.zeros(shape=(m, n, n_messages))
Lr = np.zeros(shape=(m, n, n_messages))
for n_iter in range(maxiter):
Lq, Lr, L_posteriori = solver(bits_hist, bits_values, nodes_hist,
nodes_values, Lc, Lq, Lr, n_iter)
x = np.array(L_posteriori <= 0).astype(int)
product = utils.incode(H, x)
if product:
break
if n_iter == maxiter - 1:
warnings.warn("""Decoding stopped before convergence. You may want
to increase maxiter""")
return x.squeeze()
output_type_log2 = types.Tuple((float64[:, :, :], float64[:, :, :],
float64[:, :]))
@njit(output_type_log2(int64[:], int64[:], int64[:], int64[:], float64[:, :],
float64[:, :, :], float64[:, :, :], int64), cache=True)
def _logbp_numba(bits_hist, bits_values, nodes_hist, nodes_values, Lc, Lq, Lr,
n_iter):
"""Perform inner ext LogBP solver."""
m, n, n_messages = Lr.shape
# step 1 : Horizontal
bits_counter = 0
nodes_counter = 0
for i in range(m):
# ni = bits[i]
ff = bits_hist[i]
ni = bits_values[bits_counter: bits_counter + ff]
bits_counter += ff
for j in ni:
nij = ni[:]
X = np.ones(n_messages)
if n_iter == 0:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lc[nij[kk]])
else:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lq[i, nij[kk]])
num = 1 + X
denom = 1 - X
for ll in range(n_messages):
if num[ll] == 0:
Lr[i, j, ll] = -1
elif denom[ll] == 0:
Lr[i, j, ll] = 1
else:
Lr[i, j, ll] = np.log(num[ll] / denom[ll])
# step 2 : Vertical
for j in range(n):
# mj = nodes[j]
ff = nodes_hist[j]
mj = nodes_values[nodes_counter: nodes_counter + ff]
nodes_counter += ff
for i in mj:
mji = mj[:]
Lq[i, j] = Lc[j]
for kk in range(len(mji)):
if mji[kk] != i:
Lq[i, j] += Lr[mji[kk], j]
# LLR a posteriori:
L_posteriori = np.zeros((n, n_messages))
nodes_counter = 0
for j in range(n):
ff = nodes_hist[j]
mj = nodes_values[nodes_counter: nodes_counter + ff]
nodes_counter += ff
L_posteriori[j] = Lc[j] + Lr[mj, j].sum(axis=0)
return Lq, Lr, L_posteriori
@njit(output_type_log2(int64[:], int64[:, :], int64[:], int64[:, :],
float64[:, :], float64[:, :, :], float64[:, :, :],
int64), cache=True)
def _logbp_numba_regular(bits_hist, bits_values, nodes_hist, nodes_values, Lc,
Lq, Lr, n_iter):
"""Perform inner ext LogBP solver."""
m, n, n_messages = Lr.shape
# step 1 : Horizontal
for i in range(m):
ni = bits_values[i]
for j in ni:
nij = ni[:]
X = np.ones(n_messages)
if n_iter == 0:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lc[nij[kk]])
else:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lq[i, nij[kk]])
num = 1 + X
denom = 1 - X
for ll in range(n_messages):
if num[ll] == 0:
Lr[i, j, ll] = -1
elif denom[ll] == 0:
Lr[i, j, ll] = 1
else:
Lr[i, j, ll] = np.log(num[ll] / denom[ll])
# step 2 : Vertical
for j in range(n):
mj = nodes_values[j]
for i in mj:
mji = mj[:]
Lq[i, j] = Lc[j]
for kk in range(len(mji)):
if mji[kk] != i:
Lq[i, j] += Lr[mji[kk], j]
# LLR a posteriori:
L_posteriori = np.zeros((n, n_messages))
for j in range(n):
mj = nodes_values[j]
L_posteriori[j] = Lc[j] + Lr[mj, j].sum(axis=0)
return Lq, Lr, L_posteriori
def get_message(tG, x):
"""Compute the original `n_bits` message from a `n_code` codeword `x`.
Parameters
----------
tG: array (n_code, n_bits) coding matrix tG.
x: array (n_code,) decoded codeword of length `n_code`.
Returns
-------
message: array (n_bits,). Original binary message.
"""
n, k = tG.shape
rtG, rx = utils.gausselimination(tG, x)
message = np.zeros(k).astype(int)
message[k - 1] = rx[k - 1]
for i in reversed(range(k - 1)):
message[i] = rx[i]
message[i] -= utils.binaryproduct(rtG[i, list(range(i+1, k))],
message[list(range(i+1, k))])
return abs(message)
| [
[
[
30,
41
],
[
810,
812
],
[
845,
847
],
[
1239,
1241
],
[
1284,
1286
],
[
1511,
1513
],
[
2546,
2548
],
[
2703,
2705
],
[
2857,
2859
],
[
3180,
3182
],
[
3634,
3636
],
[
4433,
4435
],
[
4590,
4592
],
[
4744,
4746
],
[
5067,
5069
],
[
5410,
5412
],
[
5987,
5989
]
],
[
[
49,
57
],
[
1664,
1672
]
],
[
[
72,
77
],
[
772,
777
],
[
1569,
1574
],
[
5942,
5947
],
[
6129,
6134
]
],
[
[
97,
101
],
[
1921,
1925
],
[
3913,
3917
]
],
[
[
103,
108
],
[
1943,
1948
],
[
1953,
1958
],
[
1963,
1968
],
[
1973,
1978
],
[
2058,
2063
],
[
3935,
3940
],
[
3945,
3950
],
[
3958,
3963
],
[
3968,
3973
],
[
4079,
4084
]
],
[
[
110,
115
],
[
1822,
1827
]
],
[
[
117,
124
],
[
1835,
1842
],
[
1853,
1860
],
[
1902,
1909
],
[
1983,
1990
],
[
2021,
2028
],
[
2040,
2047
],
[
4004,
4011
],
[
4019,
4026
],
[
4038,
4045
]
],
[
[
131,
137
]
],
[
[
1803,
1819
],
[
1926,
1942
],
[
3918,
3934
]
],
[
[
2082,
2094
],
[
1064,
1076
]
],
[
[
4103,
4123
],
[
915,
935
]
],
[
[
5583,
5594
]
]
] |
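The `decode()`/`get_message()` pair in the pyldpc sample above expects a parity-check matrix H and a coding matrix built elsewhere in the package. The sketch below is only illustrative: `make_ldpc` and `encode` are assumed helper names whose exact signatures are not confirmed by the sample itself, so treat them as placeholders.

# Hypothetical usage sketch; make_ldpc/encode are assumed helper names,
# not defined in the module shown above.
import numpy as np
H, G = make_ldpc(15, d_v=4, d_c=5, systematic=True, sparse=False)  # assumed API
snr = 8
message = np.random.randint(2, size=G.shape[1])
y = encode(G, message, snr)             # assumed API: returns a noisy codeword
x_hat = decode(H, y, snr, maxiter=100)  # defined above
recovered = get_message(G, x_hat)       # defined above
assert recovered.shape == message.shape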
import logging
import time
import sh
logger = logging.getLogger(__name__)
def is_command_available(name):
try:
sh.bash('which', name)
except sh.ErrorReturnCode:
return False
else:
return True
class KubernetesDependency:
def ensure_running(self):
logger.debug('Checking if container "{}" is running...'.format(self.name))
if self.is_container_running():
logger.debug('"{}" is running'.format(self.name))
else:
logger.debug('Starting "{}"...'.format(self.name))
self.run_container()
logger.debug('"{}" started'.format(self.name))
def run_container(self):
self._apply_definition()
self._wait_until_ready()
self._wait_for_started_log()
def _apply_definition(self):
sh.kubectl('apply', '--record', '-f', self.definition)
try:
sh.kubectl('expose', '-f', self.definition)
except sh.ErrorReturnCode_1 as e:
if b'already exists' not in e.stderr:
raise e
else:
logger.debug('Service for "{}" exists'.format(self.name))
def _wait_until_ready(self):
logger.debug('Waiting for "{}" to start (possibly downloading image)...'.format(self.name))
ready = False
while not ready:
ready = self.is_container_running()
if not ready:
time.sleep(1)
logger.debug('"{}" started'.format(self.name))
def _wait_for_started_log(self):
logger.debug('Waiting for started log for "{}"...'.format(self.name))
for log in sh.kubectl('logs', '-f', self.pod_name, _iter='out'):
if self.started_log in log:
break
logger.debug('Started log for "{}" found'.format(self.name))
def is_container_running(self):
try:
container_ready = str(sh.kubectl(
'get', 'pods',
'--selector', self.selector,
'--output', 'jsonpath="{.items[*].status.containerStatuses[*].ready}"',
)).strip()
except sh.ErrorReturnCode as e:
logger.debug(e)
return False
else:
return container_ready == '"true"'
def run_command(self, *args):
return sh.kubectl('exec', self.pod_name, '--', *args)
@property
def pod_name(self):
return str(sh.kubectl(
'get', 'pods',
'--output', 'custom-columns=NAME:.metadata.name',
'--no-headers',
'--selector', self.selector,
)).strip()
@property
def selector(self):
return 'app={}'.format(self.name)
@property
def started_log(self):
raise NotImplementedError
@property
def name(self):
raise NotImplementedError
@property
def definition(self):
raise NotImplementedError
| [
[
[
7,
14
],
[
48,
55
]
],
[
[
22,
26
],
[
1424,
1428
]
],
[
[
35,
37
],
[
127,
129
],
[
161,
163
],
[
821,
823
],
[
901,
903
],
[
960,
962
],
[
1628,
1630
],
[
1897,
1899
],
[
2111,
2113
],
[
2300,
2302
],
[
2405,
2407
]
],
[
[
39,
45
],
[
300,
306
],
[
427,
433
],
[
503,
509
],
[
599,
605
],
[
1095,
1101
],
[
1195,
1201
],
[
1446,
1452
],
[
1539,
1545
],
[
1752,
1758
],
[
2148,
2154
]
],
[
[
82,
102
]
],
[
[
240,
260
]
]
] |
class UnsafeUtility: pass | [
[
[
6,
19
]
]
] |
import enum
from itertools import chain
from django.contrib.auth.models import AbstractUser, UserManager as DjangoUserManager
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.urls import reverse
from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
import pyotp
from zentral.utils.base64 import trimmed_urlsafe_b64decode
class UserManager(DjangoUserManager):
pass
class User(AbstractUser):
email = models.EmailField(unique=True)
is_remote = models.BooleanField(default=False)
is_service_account = models.BooleanField(default=False)
password_updated_at = models.DateTimeField(blank=True, null=True, editable=False)
objects = UserManager()
class Meta:
ordering = ("username",)
def __str__(self):
if self.is_service_account:
return self.username
else:
return self.email or self.username
def get_type_display(self):
return "user" if not self.is_service_account else "service account"
def get_absolute_url(self):
return reverse("accounts:user", args=(self.pk,))
def set_password(self, *args, **kwargs):
if not self.is_remote and not self.is_service_account:
super().set_password(*args, **kwargs)
self.password_updated_at = timezone.now()
else:
self.set_unusable_password()
def save(self, *args, **kwargs):
if self.is_service_account:
# service accounts cannot be superusers
self.is_superuser = False
if self.is_service_account or self.is_remote:
# service accounts or remote users cannot have a valid password
self.set_unusable_password()
else:
if self.pk:
old_user = self._meta.model.objects.get(pk=self.pk)
if old_user.password != self.password:
if old_user.has_usable_password():
UserPasswordHistory.objects.create(
user=self,
password=old_user.password,
created_at=old_user.password_updated_at or old_user.date_joined
)
self.password_updated_at = timezone.now()
elif self.password:
self.password_updated_at = timezone.now()
super().save(*args, **kwargs)
def username_and_email_editable(self):
return not self.is_remote
def is_superuser_editable(self):
return (not self.is_superuser or
User.objects.exclude(pk=self.pk).filter(is_superuser=True).count() > 0)
def editable(self):
return self.username_and_email_editable() or self.is_superuser_editable()
def deletable(self):
return not self.is_superuser
@cached_property
def has_verification_device(self):
return len(self._all_verification_devices) > 0
@cached_property
def _all_verification_devices(self):
return list(chain(self.usertotp_set.all(),
self.userwebauthn_set.all()))
def get_verification_devices(self):
return sorted(self._all_verification_devices,
key=lambda vd: vd.name)
def get_prioritized_verification_devices(self, user_agent):
verification_devices = sorted(self._all_verification_devices,
key=lambda vd: (-1 * vd.PRIORITY, vd.name))
ua_verification_devices = [vd for vd in verification_devices if vd.test_user_agent(user_agent)]
if not ua_verification_devices and verification_devices:
raise ValueError("No verification devices compatible with this user agent")
else:
return ua_verification_devices
@cached_property
def group_name_set(self):
"""A set with all the group names. Used for authz."""
return set(self.groups.values_list("name", flat=True))
class UserPasswordHistory(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
password = models.CharField(_('password'), max_length=128)
created_at = models.DateTimeField(editable=False)
class UserVerificationDevice(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
name = models.CharField(max_length=256)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
def get_type_for_display(self):
return self.TYPE
def __str__(self):
return "{} {}".format(self.get_type_for_display(), self.name)
def get_delete_url(self):
return reverse(self.delete_url_name, args=(self.pk,))
def serialize_for_event(self):
return {"type": self.TYPE,
"pk": self.pk}
class UserTOTP(UserVerificationDevice):
TYPE = "TOTP"
PRIORITY = 10
secret = models.CharField(max_length=256)
delete_url_name = "accounts:delete_totp"
class Meta:
unique_together = (("user", "name"),)
def get_verification_url(self):
return reverse("accounts:verify_totp")
def verify(self, code):
return pyotp.TOTP(self.secret).verify(code)
def test_user_agent(self, user_agent):
return True
class WebAuthnTransport(enum.Enum):
USB = "usb"
NFC = "nfc"
BLE = "ble"
INTERNAL = "internal"
@classmethod
def choices(cls):
return tuple((i.value, i.value) for i in cls)
class UserWebAuthn(UserVerificationDevice):
TYPE = "WebAuthn"
PRIORITY = 100
delete_url_name = "accounts:delete_webauthn_device"
key_handle = models.TextField()
public_key = models.BinaryField()
rp_id = models.TextField()
transports = ArrayField(models.CharField(max_length=8, choices=WebAuthnTransport.choices()))
sign_count = models.PositiveIntegerField()
class Meta:
unique_together = (("user", "key_handle"), ("user", "name"))
def get_type_for_display(self):
return "Security key"
def get_verification_url(self):
return reverse("accounts:verify_webauthn")
def test_user_agent(self, user_agent):
return True
def get_key_handle_bytes(self):
return trimmed_urlsafe_b64decode(self.key_handle)
def get_appid(self):
if self.rp_id.startswith("https://"):
# legacy U2F registration
return self.rp_id
| [
[
[
7,
11
],
[
5397,
5401
]
],
[
[
34,
39
],
[
3100,
3105
]
],
[
[
79,
91
],
[
517,
529
]
],
[
[
93,
125
],
[
475,
492
]
],
[
[
169,
179
],
[
5842,
5852
]
],
[
[
202,
208
],
[
544,
550
],
[
591,
597
],
[
651,
657
],
[
712,
718
],
[
4064,
4070
],
[
4090,
4096
],
[
4124,
4130
],
[
4155,
4161
],
[
4220,
4226
],
[
4288,
4294
],
[
4314,
4320
],
[
4348,
4354
],
[
4375,
4381
],
[
4425,
4431
],
[
4482,
4488
],
[
5001,
5007
],
[
5737,
5743
],
[
5773,
5779
],
[
5806,
5812
],
[
5853,
5859
],
[
5939,
5945
]
],
[
[
233,
240
],
[
1162,
1169
],
[
4761,
4768
],
[
5194,
5201
],
[
6174,
6181
]
],
[
[
266,
274
],
[
1402,
1410
],
[
2343,
2351
],
[
2433,
2441
]
],
[
[
311,
326
],
[
2907,
2922
],
[
3023,
3038
],
[
3865,
3880
]
],
[
[
364,
382
],
[
4172,
4173
]
],
[
[
390,
395
],
[
5270,
5275
]
],
[
[
429,
454
],
[
6326,
6351
]
],
[
[
463,
474
],
[
787,
798
]
],
[
[
512,
516
],
[
4108,
4112
],
[
4332,
4336
],
[
2659,
2663
]
],
[
[
4044,
4063
],
[
2047,
2066
]
],
[
[
4265,
4287
],
[
4927,
4949
],
[
5598,
5620
]
],
[
[
4918,
4926
]
],
[
[
5379,
5396
],
[
5892,
5909
]
],
[
[
5585,
5597
]
]
] |
from deepdab.ai import *
class TDZeroPolicy(TabularPolicy):
def __init__(self, board_size, learning_rate=0.0, gamma=0.0, epsilon=0.0, initial_state_value=0.0, table_file_path=None):
super(TDZeroPolicy, self).__init__(board_size=board_size, epsilon=epsilon,
initial_state_value=initial_state_value, table_file_path=table_file_path)
self._learning_rate = learning_rate
self._gamma = gamma
def update_value(self, reward, initial_state, selected_state):
initial_state_string = self._find_state_string(initial_state)
selected_state_string = self._find_state_string(selected_state)
initial_state_value = self._value_table[initial_state_string]
selected_state_value = self._value_table[selected_state_string]
self._value_table[initial_state_string] = initial_state_value + self._learning_rate * (reward + (self._gamma * selected_state_value) - initial_state_value)
| [
[
[
23,
24
],
[
46,
59
]
],
[
[
33,
45
],
[
202,
214
]
]
] |
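The `update_value()` method above is the tabular TD(0) rule, V(s) <- V(s) + alpha * (r + gamma * V(s') - V(s)). A worked step with purely illustrative numbers:

# Illustrative numbers only.
alpha, gamma = 0.1, 0.9
v_s, v_s_next, reward = 0.5, 0.8, 1.0
v_s_new = v_s + alpha * (reward + gamma * v_s_next - v_s)
print(v_s_new)  # 0.5 + 0.1 * (1.0 + 0.72 - 0.5) = 0.622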
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("SGDClassifier" , "digits" , "db2")
| [
[
[
52,
72
],
[
75,
84
]
]
] |
from celery import shared_task
from .signals import slack_event_received
@shared_task
def receive_slack_signal_task(sender, event_type, event_data, **data):
slack_event_received.send(sender=sender, event_type=event_type, event_data=event_data, **data)
| [
[
[
19,
30
],
[
77,
88
]
],
[
[
53,
73
],
[
164,
184
]
],
[
[
93,
118
]
]
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 3 19:19:34 2019
@author: sercangul
"""
from math import erf
std = 10
h1 = 80
h2 = 60
mean = 70
def N(mean, std, x):
return 0.5 + 0.5 * erf((x-mean)/(std* 2**0.5))
print (round(((1 - N(mean,std,h1))*100),2))
print (round(((1 - N(mean,std,h2))*100),2))
print (round(((N(mean,std,h2)*100)),2)) | [
[
[
129,
132
],
[
214,
217
]
],
[
[
134,
137
],
[
269,
272
],
[
313,
316
],
[
353,
356
]
],
[
[
143,
145
],
[
273,
275
]
],
[
[
151,
153
],
[
317,
319
],
[
357,
359
]
],
[
[
159,
163
],
[
264,
268
],
[
308,
312
],
[
348,
352
]
],
[
[
174,
175
],
[
262,
263
],
[
306,
307
],
[
346,
347
]
]
] |
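`N()` in the script above is the normal CDF written via `erf`, so its outputs can be sanity-checked against standard-normal values (for example, P(Z <= 1) is about 0.8413). A quick self-contained check:

# Self-contained check of the CDF used above.
from math import erf
def N(mean, std, x):
    return 0.5 + 0.5 * erf((x - mean) / (std * 2 ** 0.5))
print(round(N(70, 10, 80), 4))              # ~0.8413 (one std above the mean)
print(round((1 - N(70, 10, 80)) * 100, 2))  # ~15.87, the script's first output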
import requests
import requests_cache
from bs4 import BeautifulSoup
import json
from lxml import html
import pdb
import re
import sys
import logging
import datetime
import time
# import winsound
from jinja2 import Environment, FileSystemLoader
import math
import itertools
from playsound import playsound
class Planet:
    # a planet's predicted positions can be scattered across the sky; (maxRa, maxDec) is the largest spread (in arc seconds) we tolerate before discarding the planet
maxScatteredness = (1500, 1000)
# Warn when object is scattered (but don't flag it as discarded)
maxScatterednessWarning = (1000, 800)
# Min score for planet to be worth observing
minScore = 25
# Min Magnitude
minMagnitude = 22
#maxNotSeenDays
maxNotSeenDays = 4
def __init__(self, info):
parts = info.split()
self.name = parts[0]
self.score = int(parts[1])
self.numObservations = int(parts[12])
self.arc = float(parts[-3])
self.notSeenDays = float(parts[-1])
        # Right ascension
self.ra = float(parts[5])
# Declination
self.dec = float(parts[6])
self.magnitude = float(parts[7])
# Object not good for observing
self.discard = False
self.scatterednessUrl = False
def analyzePlanet(self):
# pdb.set_trace()
print("\n" + str(datetime.datetime.utcnow()) + " Working on: " + self.name)
self.getEphemerides()
if self.haveWeObserved():
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: we have observed it already before')
if self.score < Planet.minScore:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: score too low (' + str(self.score) + ')')
if self.scatterednessUrl:
self.scatteredness = self.getScatteredness()
if self.scatteredness[0] > Planet.maxScatteredness[0] or self.scatteredness[1] > Planet.maxScatteredness[1]:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: predicted locations too scattered (' + str(self.scatteredness[0]) + ', ' + str(self.scatteredness[1]) + ')')
elif self.scatteredness[0] > Planet.maxScatterednessWarning[0] or self.scatteredness[1] > Planet.maxScatterednessWarning[1]:
logging.warning('Location of planet ' + self.name + ' is very scattered! (' + str(self.scatteredness[0]) + ', ' + str(self.scatteredness[1]) + ')')
# pdb.set_trace()
# filter not seen > 1.2 days
if self.notSeenDays > Planet.maxNotSeenDays:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: too long not seen (' + str(self.notSeenDays) + ' days)')
# Get Max Altitude
# TODO - do something with maximum altitude
if len(self.ephemerides) > 0:
self.maxAltitudeEphemeride = self.maxAlt()
if self.maxAltitudeEphemeride:
pass
# print("Max Altitude Date: " + self.maxAltitudeEphemeride.date)
if self.maxAltitudeEphemeride.effMagnitude > Planet.minMagnitude:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: effective magnitude too low (' + str(self.maxAltitudeEphemeride.effMagnitude) + ')' + ' Magnitude (' + str(self.maxAltitudeEphemeride.magnitude) + ')')
else:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: no maximum altitude obtained')
self.nearestToNow()
self.currentEphemerideInterpolation()
else:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: no ephemerides available')
if not self.discard:
logging.warning('PLANET OK: ' + self.name)
def getEphemerides(self):
url = "https://cgi.minorplanetcenter.net/cgi-bin/confirmeph2.cgi"
# print(self.name)
resp = requests.post(url, data={"mb": -30, "mf": 30, "dl": -90, "du": +90, "nl": 0, "nu": 100, "sort": "d", "W": "j", "obj": self.name, "Parallax": 1, "obscode": "L01", "long": None, "lat": None, "alt": None, "int": 1, "start": 0, "raty": "a", "mot": "m", "dmot": "p", "out": "f", "sun": "x", "oalt": 20})
resp1 = resp.text
page = BeautifulSoup(resp1, "html5lib")
links = page.find("pre")
lines = (links.text).split("\n")
lines = lines[2:-1]
lines = [l for l in lines if "<suppressed>" not in l]
# if self.name == 'ZTF00Wh':
# pdb.set_trace()
# if html.find("pre").find_all('a')[2]['href']
if len(page.find("pre").find_all('a')) > 1 and page.find("pre").find_all('a')[1]['href']:
self.scatterednessUrl = page.find("pre").find_all('a')[1]['href']
tree = html.fromstring(resp.content)
mapLinks = tree.xpath("//pre/a[text()='Map']/@href")
if len(mapLinks) > 0:
self.mapLink = mapLinks[0]
if len(tree.xpath("//a[text()='observations']/@href")) > 0:
self.observationsUrl = tree.xpath("//a[text()='observations']/@href")[0]
self.ephemerides = []
ephReport = {}
for l in lines:
eph = Ephemeride(l)
if eph.isValid():
self.ephemerides.append(eph)
ephReport["OK"] = ephReport["OK"] + 1 if "OK" in ephReport else 1
else:
ephReport[eph.discardReason] = ephReport[eph.discardReason] + 1 if eph.discardReason in ephReport else 1
if len(ephReport):
print("Ephemerides report: " + json.dumps(ephReport))
# print(ephDropReasons)
def maxAlt(self):
maxAlt = float("-inf")
index = None
# logging.warning('Obtaining efemeride for: ' + self.name)
for i, eph in enumerate(self.ephemerides):
# logging.warning('Eph.alt: ' + str(eph.alt))
if eph.alt > maxAlt:
maxAlt = eph.alt
index = i
if index is None:
self.discard = True
return None
return self.ephemerides[index]
def nearestToNow(self):
secondsFromNow = float("inf")
index = None
for i, eph in enumerate(self.ephemerides):
if eph.secondsFromNowPlus600() < secondsFromNow:
secondsFromNow = eph.secondsFromNowPlus600()
index = i
if isinstance(index, int):
self.nearestToNowEphemeride = self.ephemerides[index]
return index
return None
def currentEphemerideInterpolation(self):
index = self.nearestToNow()
interpolatedEph = Ephemeride(self.ephemerides[index].line)
if index:
eph = Ephemeride(self.ephemerides[index].line)
if eph.secondsFromNowPlus600() > 0:
if len(self.ephemerides) > index + 1:
currentEph = eph
nextEph = self.ephemerides[index + 1]
else:
self.currentInterpolatedEphemeride = eph
return eph
elif eph.secondsFromNowPlus600() == 0:
self.currentInterpolatedEphemeride = eph
return eph
else:
if index > 0:
currentEph = self.ephemerides[index - 1]
nextEph = eph
else:
self.currentInterpolatedEphemeride = eph
return eph
timeInterval = nextEph.dateUnix - currentEph.dateUnix
dt = time.mktime(datetime.datetime.utcnow().timetuple()) + 600 - currentEph.dateUnix
dtPerc = dt / timeInterval
interpolatedEph.azimuth = currentEph.azimuth + ((nextEph.azimuth - currentEph.azimuth) * dtPerc)
interpolatedEph.alt = currentEph.alt + ((nextEph.alt - currentEph.alt) * dtPerc)
interpolatedEph.dateUnix = currentEph.dateUnix + dt
interpolatedEph.updateLineFromData()
# print('Interpolated Ephemeride: ')
# print(interpolatedEph.line)
self.currentInterpolatedEphemeride = interpolatedEph
return interpolatedEph
self.currentInterpolatedEphemeride = None
return None
# Have we observed the planet before
def haveWeObserved(self):
resp = requests.get(self.observationsUrl)
tree = html.fromstring(resp.content)
text = tree.xpath('//pre/text()')
# pdb.set_trace()
if re.search("L01\n", text[0]):
return True
return False
# scatteredness of results
def getScatteredness(self):
resp = requests.get(self.scatterednessUrl).text
html = BeautifulSoup(resp, "html5lib")
links = html.find("pre")
observationPoints = re.findall(r'([+-][0-9]+) +([+-][0-9]+).*Ephemeris # [0-9]+$', links.text, re.M)
minRa, maxRa, minDec, maxDec = 0, 0, 0, 0
for point in observationPoints:
if int(point[0]) < minRa:
minRa = int(point[0])
elif int(point[0]) > maxRa:
maxRa = int(point[0])
if int(point[1]) < minDec:
minDec = int(point[1])
elif int(point[1]) > maxDec:
maxDec = int(point[1])
return (maxRa - minRa, maxDec - minDec)
# planet1 = Planet()
class Ephemeride:
    # Maximum sun altitude (otherwise we can't observe the planet)
maxSunAlt = -15
# Minimum altitude of object (below can't be seen due to horizon or clouds)
minAlt = 15
# Minimum distance of object from the Moon
minMoonDistance = 20
# Minimum motion (speed = "/min)
minMotion = 2.5
# Why did the ephemeride get discarded (if at all)
discardReason = ''
def __init__(self, info):
# Date UT * R.A. (J2000) Decl. Elong. V Motion Object Sun Moon
# h m "/min P.A. Azi. Alt. Alt. Phase Dist. Alt.
# 2018 10 12 1900 23 26 39.1 +30 55 48 146.2 22.0 0.22 129.4 289 +62 -28 0.15 114 -03
self.line = info
parts = self.line.split()
self.date = parts[0] + ' ' + parts[1] + ' ' + parts[2] + ' ' + parts[3]
self.dateUnix = time.mktime(datetime.datetime.strptime(self.date, "%Y %m %d %H%M").timetuple())
# Azimuth of object at that time
self.azimuth = float(parts[14])
# Altitude of object (above horizon) at that time
self.alt = float(parts[15])
# Altitude of sun at the time
self.sunAlt = float(parts[16])
# Distance from the moon
self.moonDistance = float(parts[18])
self.magnitude = float(parts[11])
        # Effective magnitude - magnitude that takes into account atmospheric extinction due to (low) altitude of planet
self.effMagnitude = self.getEffectiveMagnitude()
self.motion = float(parts[12])
# Observation time needed (in minutes) - approximates the imaging time needed to get a good picture
self.observationTime = self.getObservationTime()
# pdb.set_trace()
# logging.warning('Magnitude vs Effective Magnitude: ' + str(self.magnitude) + " : " + str(self.effMagnitude))
def isValid(self):
if self.sunAlt > Ephemeride.maxSunAlt:
self.discardReason = 'nearSun'
return False
if self.alt < Ephemeride.minAlt:
self.discardReason = 'altLow'
return False
if self.moonDistance < Ephemeride.minMoonDistance:
self.discardReason = 'nearMoon'
return False
if self.dateUnix > Main.endObservationTimestamp:
self.discardReason = 'tooLate'
return False
if self.motion < Ephemeride.minMotion:
self.discardReason = 'tooSlow'
return False
return True
def getEffectiveMagnitude(self):
if self.alt < 40:
return self.magnitude + ((self.alt - 40) * 0.1)
else:
return self.magnitude
def getObservationTime(self):
return round(10 + (self.effMagnitude - 18) * 5, 2)
def secondsFromNowPlus600(self):
""" Number of seconds from (Now + 600 seconds) """
currentTimePlus600 = time.mktime(datetime.datetime.utcnow().timetuple()) + 600
return math.fabs(self.dateUnix - currentTimePlus600)
def updateLineFromData(self):
line = self.line.split(' ')
line[0] = datetime.datetime.fromtimestamp(self.dateUnix).strftime("%Y")
line[1] = datetime.datetime.fromtimestamp(self.dateUnix).strftime("%m")
line[2] = datetime.datetime.fromtimestamp(self.dateUnix).strftime("%d")
line[3] = datetime.datetime.fromtimestamp(self.dateUnix).strftime("%H%M")
        # Azimuth & Altitude
line[22] = str(round(self.azimuth)).zfill(3)
line[24] = str(round(self.alt)) if self.alt < 0 else ('+' + str(round(self.alt)))
self.line = ' '.join(line)
class Map:
def __init__(self, planets):
renderPlanets = []
for planet in planets:
if not planet.discard and planet.currentInterpolatedEphemeride:
# pdb.set_trace()
renderDict = {}
renderDict["name"] = planet.name
renderDict["magnitude"] = planet.currentInterpolatedEphemeride.magnitude
# Displacement from center of map
radius = (90 - planet.currentInterpolatedEphemeride.alt)
# Angle of displacement
angle = math.radians(planet.currentInterpolatedEphemeride.azimuth + 180)
# Convert the radius and angle to X and Y
renderDict["coordinates"] = []
renderDict["coordinates"].append(-(1000 * radius * math.sin(angle) / 90) + 1250)
renderDict["coordinates"].append(-(1000 * radius * math.cos(angle) / 90) + 1250)
renderPlanets.append(renderDict)
env = Environment(loader=FileSystemLoader('.'))
template = env.get_template('skymap.template')
output = template.render({"params": renderPlanets})
with open('skymap.html', 'w') as f:
f.write(output)
class Main:
# Observation date starts at 11:00 and ends next day at 10:59:59 (for file timestamps, etc)
observationDate = datetime.datetime.utcnow() - datetime.timedelta(hours=11)
# observation ends next day at 11:00:00 (so we'll discard later observations, etc)
endObservationTimestamp = time.mktime((observationDate.replace(hour=0, minute=0, second=0, microsecond=0) + datetime.timedelta(hours=35)).timetuple())
def __init__(self):
global debugging
if debugging:
# Cache all server responses to make faster development
requests_cache.install_cache('reqeusts-cache', allowable_methods=('GET', 'POST'))
self.planets = []
self.repeatMode = True
self.beeperOn = False
self.firstRun = True
self.setInitParams()
self.getData()
self.writeToFile()
Map(self.planets)
print('\nFirst run completed successfully! Now go, play! Make something big!')
# pdb.set_trace()
while self.repeatMode:
if self.firstRun:
print("\n=============================================================")
self.firstRun = False
self.beeperOn = True
self.getData()
self.writeToFile()
Map(self.planets)
time.sleep(300)
def setInitParams(self):
repeat = input('Continuous observation True/False (' + str(self.repeatMode) + ')? ')
if re.fullmatch(r'(True)|(False)', repeat):
if repeat == 'True':
self.repeatMode = True
else:
self.repeatMode = False
print('Continuous observation: ' + str(self.repeatMode))
minScore = input('Minimum score (' + str(Planet.minScore) + ')? ')
if minScore.isdigit():
Planet.minScore = int(minScore)
print('Minimum score: ' + str(Planet.minScore))
minMagnitude = input('Minimum efective magnitude (' + str(Planet.minMagnitude) + ')? ')
if re.fullmatch(r'[+-]?[0-9]+\.?[0-9]*', minMagnitude):
Planet.minMagnitude = float(minMagnitude)
print('Minimum efective magnitude: ' + str(Planet.minMagnitude))
minAlt = input('Minimum altitude (' + str(Ephemeride.minAlt) + ')? ')
if re.fullmatch(r'[+-]?[0-9]+\.?[0-9]*', minAlt):
Ephemeride.minAlt = float(minAlt)
print('Minimum altitude: ' + str(Ephemeride.minAlt))
maxScatteredness1 = input('Maximum scateredness in x coordinate (' + str(Planet.maxScatteredness[0]) + ')? ')
if maxScatteredness1.isdigit():
Planet.maxScatteredness = (int(maxScatteredness1), Planet.maxScatteredness[1])
maxScatteredness2 = input('Maximum scateredness in y coordinate (' + str(Planet.maxScatteredness[1]) + ')? ')
if maxScatteredness2.isdigit():
Planet.maxScatteredness = (Planet.maxScatteredness[0], int(maxScatteredness2))
print('Maximum scateredness: (' + str(Planet.maxScatteredness[0]) + ', ' + str(Planet.maxScatteredness[1]) + ')')
maxNotSeenDays = input('Maximum not seen days (' + str(Planet.maxNotSeenDays) + ')? ')
if re.fullmatch(r'[+-]?[0-9]+\.?[0-9]*', maxNotSeenDays):
Planet.maxNotSeenDays = float(maxNotSeenDays)
print('Maximum not seen days: ' + str(Planet.maxNotSeenDays))
maxSunAlt = input('Maximum sun altitude (' + str(Ephemeride.maxSunAlt) + ')? ')
if re.fullmatch(r'[+-]?[0-9]+\.?[0-9]*', maxSunAlt):
Ephemeride.maxSunAlt = float(maxSunAlt)
print('Maximum sun altitude: ' + str(Ephemeride.maxSunAlt))
minMoonDistance = input('Minimum distance from the moon (' + str(Ephemeride.minMoonDistance) + ')? ')
if re.fullmatch(r'[+-]?[0-9]+\.?[0-9]*', minMoonDistance):
Ephemeride.minMoonDistance = float(minMoonDistance)
print('Minimum distance from the moon: ' + str(Ephemeride.minMoonDistance))
minMotion = input('Minimum motion (speed) (' + str(Ephemeride.minMotion) + ')? ')
if re.fullmatch(r'[0-9]+\.?[0-9]*', minMotion):
Ephemeride.minMotion = float(minMotion)
print('Minimum motion (speed): ' + str(Ephemeride.minMotion))
def getData(self):
url = "https://www.minorplanetcenter.net/iau/NEO/neocp.txt"
resp = requests.get(url).text[:-1].split("\n")
currentPlanets = []
for planetString in resp:
# for planetString in itertools.islice(resp, 0, 4):
p = Planet(planetString)
currentPlanets.append(p.name)
if p.name not in (i.name for i in self.planets):
p.analyzePlanet()
self.planets.append(p)
if self.beeperOn:
playsound('up.wav')
else:
# print('Plane already known (' + p.name + ')')
pass
# Did any of planets get removed?
if not self.firstRun:
for i in range(len(self.planets) -1, -1, -1):
if self.planets[i].name not in currentPlanets:
del self.planets[i]
if not self.planets[i].discard:
print('\n' + str(datetime.datetime.utcnow()) + ' Planet ' + self.planets[i].name + ' was removed!')
playsound('down.wav')
elif not self.planets[i].discard:
# Update the nearest to now ephemeride (so it can be put into file)
self.planets[i].nearestToNow()
self.planets[i].currentEphemerideInterpolation()
def sortByMaxAlt(self):
return sorted([p for p in self.planets if not p.discard], key=lambda planet: planet.maxAltitudeEphemeride.dateUnix)
def writeToFile(self):
# logging.warning('Writing output to file')
# pdb.set_trace()
with open(Main.observationDate.strftime("%Y-%m-%d") + ".txt", "w") as f:
header = """Date UT * R.A. (J2000) Decl. Elong. V Motion Object Sun Moon
h m "/min P.A. Azi. Alt. Alt. Phase Dist. Alt."""+"\n\n\n"
f.write(header + "\n")
sortedPlanets = self.sortByMaxAlt()
for p in sortedPlanets:
if not p.discard:
# pdb.set_trace()
fileLine = "* " + p.name + " score=" + str(p.score) + ', obs=' + str(p.numObservations) + ', arc=' + str(p.arc) + ', notSeen=' + str(p.notSeenDays) + "days, obsExposure=" + str(p.maxAltitudeEphemeride.observationTime) + 'min'
if hasattr(p, 'scatteredness'):
fileLine += ', scatteredness=(' + str(p.scatteredness[0]) + ',' + str(p.scatteredness[1]) + ')'
if hasattr(p, 'mapLink'):
fileLine += ', mapLink=' + p.mapLink
f.write(fileLine + "\n")
# Comment out highest ephemeride
f.write("// " + p.maxAltitudeEphemeride.line + "\n")
# And print current ephemeride
f.write("// " + p.nearestToNowEphemeride.line + "\n")
# And print current interpolated ephemeride
if p.currentInterpolatedEphemeride:
f.write(p.currentInterpolatedEphemeride.line + "\n\n")
else:
f.write(p.nearestToNowEphemeride.line + "\n\n")
f.close()
if __name__ == "__main__":
debugging = False
if '--debug' in sys.argv:
debugging = True
# logger = logging.getLogger()
logging.basicConfig(level=logging.INFO, format="%(message)s")
# Start the program
main = Main()
# pdb.set_trace()
| [
[
[
7,
15
],
[
4147,
4155
],
[
8547,
8555
],
[
8860,
8868
],
[
18786,
18794
]
],
[
[
23,
37
],
[
15038,
15052
]
],
[
[
54,
67
],
[
4487,
4500
],
[
8916,
8929
]
],
[
[
75,
79
],
[
5790,
5794
]
],
[
[
97,
101
],
[
5000,
5004
],
[
8597,
8601
]
],
[
[
109,
112
]
],
[
[
120,
122
],
[
8706,
8708
],
[
9010,
9012
],
[
9086,
9088
],
[
15927,
15929
],
[
16478,
16480
],
[
16748,
16750
],
[
17630,
17632
],
[
17913,
17915
],
[
18205,
18207
],
[
18511,
18513
]
],
[
[
130,
133
],
[
22050,
22053
]
],
[
[
141,
148
],
[
22125,
22132
],
[
22151,
22158
],
[
1534,
1541
],
[
1718,
1725
],
[
2087,
2094
],
[
2410,
2417
],
[
2723,
2730
],
[
3301,
3308
],
[
3584,
3591
],
[
3816,
3823
],
[
3956,
3963
]
],
[
[
156,
164
],
[
14586,
14594
],
[
14615,
14623
],
[
14843,
14851
],
[
1365,
1373
],
[
7775,
7783
],
[
10505,
10513
],
[
12511,
12519
],
[
12707,
12715
],
[
12787,
12795
],
[
12867,
12875
],
[
12947,
12955
],
[
19666,
19674
]
],
[
[
172,
176
],
[
14761,
14765
],
[
7763,
7767
],
[
10493,
10497
],
[
12499,
12503
],
[
15776,
15780
]
],
[
[
215,
226
],
[
14223,
14234
]
],
[
[
228,
244
],
[
14242,
14258
]
],
[
[
252,
256
],
[
12572,
12576
],
[
13793,
13797
],
[
14031,
14035
],
[
14128,
14132
]
],
[
[
264,
273
]
],
[
[
297,
306
],
[
19216,
19225
],
[
19773,
19782
]
],
[
[
314,
320
],
[
1657,
1663
],
[
1953,
1959
],
[
2007,
2013
],
[
2298,
2304
],
[
2359,
2365
],
[
2656,
2662
],
[
3220,
3226
],
[
16213,
16219
],
[
16282,
16288
],
[
16352,
16358
],
[
16437,
16443
],
[
16543,
16549
],
[
16636,
16642
],
[
16984,
16990
],
[
17124,
17130
],
[
17073,
17079
],
[
17233,
17239
],
[
17349,
17355
],
[
17322,
17328
],
[
17447,
17453
],
[
17488,
17494
],
[
17587,
17593
],
[
17697,
17703
],
[
17789,
17795
],
[
18964,
18970
]
],
[
[
9573,
9583
],
[
5410,
5420
],
[
6857,
6867
],
[
6935,
6945
],
[
11520,
11530
],
[
11632,
11642
],
[
11749,
11759
],
[
11996,
12006
],
[
16709,
16719
],
[
16807,
16817
],
[
16882,
16892
],
[
17871,
17881
],
[
17975,
17985
],
[
18060,
18070
],
[
18157,
18167
],
[
18273,
18283
],
[
18380,
18390
],
[
18469,
18479
],
[
18568,
18578
],
[
18655,
18665
]
],
[
[
13229,
13232
],
[
15325,
15328
],
[
15746,
15749
]
],
[
[
14462,
14466
],
[
22223,
22227
],
[
11873,
11877
],
[
20333,
20337
]
],
[
[
22012,
22021
],
[
14947,
14956
]
],
[
[
22068,
22077
],
[
14947,
14956
]
],
[
[
22216,
22220
]
]
] |
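Two small formulas in the `Ephemeride` class above are easy to check by hand: the altitude-adjusted magnitude in `getEffectiveMagnitude()` and the exposure estimate in `getObservationTime()`. Worked numbers, purely illustrative:

# Illustrative values, not taken from any real object.
alt, magnitude = 25.0, 20.5
eff_magnitude = magnitude + (alt - 40) * 0.1 if alt < 40 else magnitude
observation_time = round(10 + (eff_magnitude - 18) * 5, 2)
print(eff_magnitude)     # 19.0  (magnitude adjusted by (alt - 40) * 0.1 = -1.5)
print(observation_time)  # 15.0  minutes of exposure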
from bizfriendly import app
from flask.ext.heroku import Heroku
import os
heroku = Heroku(app) # Sets CONFIG automagically
app.config.update(
# DEBUG = True,
# SQLALCHEMY_DATABASE_URI = 'postgres://hackyourcity@localhost/howtocity',
# SQLALCHEMY_DATABASE_URI = 'postgres://postgres:root@localhost/howtocity',
# SECRET_KEY = '123456'
)
app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY')
app.config['MAIL_GUN_KEY'] = os.environ.get('MAIL_GUN_KEY')
app.config['AWS_ACCESS_KEY_ID'] = os.environ.get('AWS_ACCESS_KEY_ID')
app.config['AWS_SECRET_ACCESS_KEY'] = os.environ.get('AWS_SECRET_ACCESS_KEY')
app.config['S3_BUCKET_NAME'] = os.environ.get('S3_BUCKET_NAME')
def add_cors_header(response):
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Headers'] = 'Authorization, Content-Type'
response.headers['Access-Control-Allow-Methods'] = 'POST, GET, PUT, PATCH, DELETE, OPTIONS'
return response
app.after_request(add_cors_header) | [
[
[
24,
27
],
[
91,
94
],
[
124,
127
],
[
353,
356
],
[
409,
412
],
[
469,
472
],
[
539,
542
],
[
617,
620
],
[
972,
975
]
],
[
[
57,
63
],
[
84,
90
]
],
[
[
71,
73
],
[
380,
382
],
[
438,
440
],
[
503,
505
],
[
577,
579
],
[
648,
650
]
],
[
[
75,
81
]
],
[
[
686,
701
],
[
990,
1005
]
]
] |
from itertools import product
aarr = list(map(int, input().split()))
barr = list(map(int, input().split()))
print(' '.join(str(i) for i in list(product(*[aarr, barr]))))
| [
[
[
22,
29
],
[
146,
153
]
],
[
[
31,
35
],
[
156,
160
]
],
[
[
70,
74
],
[
162,
166
]
]
] |
"""
SYS-611: Example factory model in SimPy (object-oriented).
@author: Paul T. Grogan, pgrogan@stevens.edu
"""
# import the python3 behavior for importing, division, and printing in python2
from __future__ import absolute_import, division, print_function
# import the simpy package
# see https://simpy.readthedocs.io/en/latest/api_reference for documentation
import simpy
# import the numpy package and refer to it as `np`
# see http://docs.scipy.org/doc/numpy/reference/ for documentation
import numpy as np
# import the matplotlib pyplot package and refer to it as `plt`
# see http://matplotlib.org/api/pyplot_api.html for documentation
import matplotlib.pyplot as plt
#%% SECTION TO CONFIGURE SIMULATION
# number of simulation runs to perform
NUM_RUNS = 1
# simulation duration (hours)
SIM_DURATION = 5*8*52
# number of spares to purchase (S)
NUM_SPARES = 20
# number of repairers to hire (R)
NUM_REPAIRERS = 5
#%% SECTION TO DEFINE SIMULATION
class Factory(object):
""" Defines a factory simulation. """
def __init__(self, env, num_repairers, num_spares):
""" Initializes this factory.
Args:
env (simpy.Environment): the simulation environment
num_repairers (int): the number of repairers to hire
num_spares (int): the number of spares to purchase
"""
self.repairers = simpy.Resource(env, capacity=num_repairers)
self.spares = simpy.Container(env, init=num_spares, capacity=num_spares)
self.env = env
self.cost = 0
self.daily_cost = 3.75*8*num_repairers + 30*num_spares
def run(self):
""" Process to run this simulation. """
# launch the 50 machine processes
for i in range(50):
self.env.process(factory.operate_machine(i+1))
# update the daily costs each day
while True:
self.cost += self.daily_cost
yield self.env.timeout(8.0)
def operate_machine(self, machine):
""" Process to operate a machine.
Args:
machine (int): the machine number
"""
while True:
# wait until the machine breaks
yield self.env.timeout(np.random.uniform(132,182))
time_broken = self.env.now
if NUM_RUNS <= 1:
print('machine {} broke at {:.2f} ({} spares available)'.format(
machine, time_broken, self.spares.level))
# launch the repair process
self.env.process(self.repair_machine())
# wait for a spare to become available
yield self.spares.get(1)
time_replaced = self.env.now
if NUM_RUNS <= 1:
print('machine {} replaced at {:.2f}'.format(machine, time_replaced))
# update the cost for being out of service
self.cost += 20*(time_replaced-time_broken)
def repair_machine(self):
""" Process to repair a machine. """
with self.repairers.request() as request:
# wait for a repairer to become available
yield request
# perform the repair
yield self.env.timeout(np.random.uniform(4,10))
# put the machine back in the spares pool
yield self.spares.put(1)
if NUM_RUNS <= 1:
print('repair complete at {:.2f} ({} spares available)'.format(
self.env.now, self.spares.level))
# arrays to record data
obs_time = []
obs_cost = []
obs_spares = []
def observe(env, factory):
""" Process to observe the factory during a simulation.
Args:
env (simpy.Environment): the simulation environment
factory (Factory): the factory
"""
while True:
obs_time.append(env.now)
obs_cost.append(factory.cost)
obs_spares.append(factory.spares.level)
yield env.timeout(1.0)
#%% SECTION TO RUN ANALYSIS
# array to store outputs
COST = []
for i in range(NUM_RUNS):
# set the random number seed
np.random.seed(i)
# create the simpy environment
env = simpy.Environment()
# create the factory
factory = Factory(env, NUM_REPAIRERS, NUM_SPARES)
# add the factory run process
env.process(factory.run())
# add the observation process
env.process(observe(env, factory))
# run simulation
env.run(until=SIM_DURATION)
# record the final observed cost
COST.append(obs_cost[-1])
if NUM_RUNS <= 1:
# output the total cost
print('Total cost: {:.2f}'.format(factory.cost))
# plot the number of spares available
plt.figure()
plt.step(obs_time, obs_spares, where='post')
plt.xlabel('Time (hour)')
plt.ylabel('Number Spares Available')
# plot the total cost accumulation
plt.figure()
plt.step(obs_time, obs_cost, where='post')
plt.xlabel('Time (hour)')
plt.ylabel('Total Cost')
# print final results to console
print('Factory costs for N={:} runs with R={:} repairers and S={:} spares:'.format(
NUM_RUNS, NUM_REPAIRERS, NUM_SPARES))
print('\n'.join('{:.2f}'.format(i) for i in COST))
#%% SECTION TO WRITE RESULTS TO CSV FILE
import csv
with open('factory.csv', 'w') as output:
writer = csv.writer(output)
for sample in COST:
writer.writerow([sample]) | [
[
[
216,
231
]
],
[
[
233,
241
]
],
[
[
243,
257
]
],
[
[
371,
376
],
[
4120,
4125
],
[
1369,
1374
],
[
1436,
1441
]
],
[
[
503,
514
],
[
4052,
4054
],
[
2214,
2216
],
[
3194,
3196
]
],
[
[
653,
677
],
[
4656,
4659
],
[
4677,
4680
],
[
4730,
4733
],
[
4764,
4767
],
[
4862,
4865
],
[
4883,
4886
],
[
4934,
4937
],
[
4968,
4971
]
],
[
[
755,
763
],
[
4004,
4012
],
[
4489,
4497
],
[
5131,
5139
],
[
2296,
2304
],
[
2694,
2702
],
[
3325,
3333
]
],
[
[
798,
810
],
[
4396,
4408
]
],
[
[
855,
865
],
[
4207,
4217
],
[
5156,
5166
]
],
[
[
905,
918
],
[
4192,
4205
],
[
5141,
5154
]
],
[
[
964,
971
],
[
4179,
4186
]
],
[
[
3503,
3511
],
[
4686,
4694
],
[
4892,
4900
],
[
3781,
3789
]
],
[
[
3517,
3525
],
[
4463,
4471
],
[
4902,
4910
],
[
3814,
3822
]
],
[
[
3531,
3541
],
[
4696,
4706
],
[
3852,
3862
]
],
[
[
3552,
3559
],
[
4334,
4341
]
],
[
[
3978,
3982
],
[
4451,
4455
],
[
5217,
5221
],
[
5370,
5374
]
],
[
[
3993,
3994
],
[
4067,
4068
]
],
[
[
4114,
4117
],
[
4187,
4190
],
[
4257,
4260
],
[
4322,
4325
],
[
4342,
4345
],
[
4382,
4385
]
],
[
[
4169,
4176
],
[
4269,
4276
],
[
4347,
4354
],
[
4578,
4585
],
[
1774,
1781
]
],
[
[
5274,
5277
],
[
5333,
5336
]
],
[
[
5312,
5318
],
[
5344,
5350
]
],
[
[
5324,
5330
],
[
5384,
5390
]
],
[
[
5360,
5366
],
[
5401,
5407
]
]
] |
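In the SimPy factory sample above, the recurring cost charged every 8-hour step comes straight from `Factory.__init__`: repairer wages plus spare holding cost. With the module's defaults of 5 repairers and 20 spares:

# Worked arithmetic for the default configuration above.
NUM_REPAIRERS, NUM_SPARES = 5, 20
daily_cost = 3.75 * 8 * NUM_REPAIRERS + 30 * NUM_SPARES
print(daily_cost)  # 150.0 + 600 = 750.0 per 8-hour day, before the 20/hour
                   # out-of-service charge added in operate_machine()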
import logging
from tornado import web
from tornado import gen
from ..views import BaseHandler
from ..api.workers import ListWorkers
logger = logging.getLogger(__name__)
class WorkerView(BaseHandler):
@web.authenticated
@gen.coroutine
def get(self, name):
try:
yield ListWorkers.update_workers(app=self.application, workername=name)
except Exception as e:
logger.error(e)
worker = ListWorkers.worker_cache.get(name)
if worker is None:
raise web.HTTPError(404, "Unknown worker '%s'" % name)
if 'stats' not in worker:
raise web.HTTPError(
404,
"Unable to get stats for '%s' worker" % name
)
self.render("worker.html", worker=dict(worker, name=name))
| [
[
[
7,
14
],
[
146,
153
]
],
[
[
36,
39
],
[
212,
215
],
[
529,
532
],
[
630,
633
]
],
[
[
60,
63
],
[
235,
238
]
],
[
[
85,
96
],
[
193,
204
]
],
[
[
123,
134
],
[
305,
316
],
[
448,
459
]
],
[
[
137,
143
],
[
414,
420
]
],
[
[
182,
192
]
]
] |
from great_expectations.render.renderer.content_block.content_block import (
ContentBlockRenderer,
)
from great_expectations.render.types import (
RenderedBulletListContent,
RenderedStringTemplateContent,
)
class ExceptionListContentBlockRenderer(ContentBlockRenderer):
"""Render a bullet list of exception messages raised for provided EVRs"""
_rendered_component_type = RenderedBulletListContent
_content_block_type = "bullet_list"
_default_header = 'Failed expectations <span class="mr-3 triangle"></span>'
_default_content_block_styling = {
"classes": ["col-12"],
"styles": {"margin-top": "20px"},
"header": {
"classes": ["collapsed"],
"attributes": {
"data-toggle": "collapse",
"href": "#{{content_block_id}}-body",
"role": "button",
"aria-expanded": "true",
"aria-controls": "collapseExample",
},
"styles": {
"cursor": "pointer",
},
},
"body": {
"classes": ["list-group", "collapse"],
},
}
_default_element_styling = {
"classes": [
"list-group-item"
], # "d-flex", "justify-content-between", "align-items-center"],
"params": {
"column": {"classes": ["badge", "badge-primary"]},
"expectation_type": {"classes": ["text-monospace"]},
"exception_message": {"classes": ["text-monospace"]},
},
}
@classmethod
def render(cls, render_object, **kwargs):
return super().render(
render_object=render_object, exception_list_content_block=True
)
@classmethod
def _missing_content_block_fn(
cls,
configuration=None,
result=None,
language=None,
runtime_configuration=None,
**kwargs,
):
runtime_configuration = runtime_configuration or {}
include_column_name = runtime_configuration.get("include_column_name", True)
include_column_name = (
include_column_name if include_column_name is not None else True
)
styling = runtime_configuration.get("styling")
# Only render EVR objects for which an exception was raised
if result.exception_info["raised_exception"] is True:
template_str = "$expectation_type raised an exception: $exception_message"
if include_column_name:
template_str = f"$column: {template_str}"
try:
column = result.expectation_config.kwargs["column"]
except KeyError:
column = None
return [
RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": template_str,
"params": {
"column": column,
"expectation_type": result.expectation_config.expectation_type,
"exception_message": result.exception_info[
"exception_message"
],
},
"styling": styling,
},
}
)
]
| [
[
[
81,
101
],
[
261,
281
]
],
[
[
155,
180
],
[
394,
419
]
],
[
[
186,
215
],
[
2733,
2762
]
],
[
[
227,
260
]
]
] |
#!/usr/bin/env python
#
# Electrum - Lightweight Bitcoin Client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import time
import threading
import base64
from functools import partial
import smtplib
import imaplib
import email
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.encoders import encode_base64
from PyQt5.QtGui import *
from PyQt5.QtCore import *
import PyQt5.QtGui as QtGui
from PyQt5.QtWidgets import (QVBoxLayout, QLabel, QGridLayout, QLineEdit, QInputDialog)
from electrum_dash.plugins import BasePlugin, hook
from electrum_dash.paymentrequest import PaymentRequest
from electrum_dash.i18n import _
from electrum_dash_gui.qt.util import EnterButton, Buttons, CloseButton
from electrum_dash_gui.qt.util import OkButton, WindowModalDialog
class Processor(threading.Thread):
polling_interval = 5*60
def __init__(self, imap_server, username, password, callback):
threading.Thread.__init__(self)
self.daemon = True
self.username = username
self.password = password
self.imap_server = imap_server
self.on_receive = callback
def poll(self):
try:
self.M.select()
except:
return
typ, data = self.M.search(None, 'ALL')
for num in data[0].split():
typ, msg_data = self.M.fetch(num, '(RFC822)')
msg = email.message_from_string(msg_data[0][1])
p = msg.get_payload()
if not msg.is_multipart():
p = [p]
continue
for item in p:
if item.get_content_type() == "application/dash-paymentrequest":
pr_str = item.get_payload()
pr_str = base64.b64decode(pr_str)
self.on_receive(pr_str)
def run(self):
self.M = imaplib.IMAP4_SSL(self.imap_server)
self.M.login(self.username, self.password)
while True:
self.poll()
time.sleep(self.polling_interval)
self.M.close()
self.M.logout()
def send(self, recipient, message, payment_request):
msg = MIMEMultipart()
msg['Subject'] = message
msg['To'] = recipient
msg['From'] = self.username
part = MIMEBase('application', "dash-paymentrequest")
part.set_payload(payment_request)
encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="payreq.dash"')
msg.attach(part)
s = smtplib.SMTP_SSL(self.imap_server, timeout=2)
s.login(self.username, self.password)
s.sendmail(self.username, [recipient], msg.as_string())
s.quit()
class QEmailSignalObject(QObject):
email_new_invoice_signal = pyqtSignal()
class Plugin(BasePlugin):
def fullname(self):
return 'Email'
def description(self):
return _("Send and receive payment requests via email")
def is_available(self):
return True
def __init__(self, parent, config, name):
BasePlugin.__init__(self, parent, config, name)
self.imap_server = self.config.get('email_server', '')
self.username = self.config.get('email_username', '')
self.password = self.config.get('email_password', '')
if self.imap_server and self.username and self.password:
self.processor = Processor(self.imap_server, self.username, self.password, self.on_receive)
self.processor.start()
self.obj = QEmailSignalObject()
self.obj.email_new_invoice_signal.connect(self.new_invoice)
def on_receive(self, pr_str):
self.print_error('received payment request')
self.pr = PaymentRequest(pr_str)
self.obj.email_new_invoice_signal.emit()
def new_invoice(self):
self.parent.invoices.add(self.pr)
#window.update_invoices_list()
@hook
def receive_list_menu(self, menu, addr):
window = menu.parentWidget()
menu.addAction(_("Send via e-mail"), lambda: self.send(window, addr))
def send(self, window, addr):
from electrum_dash import paymentrequest
r = window.wallet.receive_requests.get(addr)
message = r.get('memo', '')
if r.get('signature'):
pr = paymentrequest.serialize_request(r)
else:
pr = paymentrequest.make_request(self.config, r)
if not pr:
return
        recipient, ok = QInputDialog.getText(window, 'Send request', 'Email invoice to:')
if not ok:
return
recipient = str(recipient)
payload = pr.SerializeToString()
self.print_error('sending mail to', recipient)
try:
self.processor.send(recipient, message, payload)
except BaseException as e:
window.show_message(str(e))
return
window.show_message(_('Request sent.'))
def requires_settings(self):
return True
def settings_widget(self, window):
return EnterButton(_('Settings'), partial(self.settings_dialog, window))
def settings_dialog(self, window):
d = WindowModalDialog(window, _("Email settings"))
d.setMinimumSize(500, 200)
vbox = QVBoxLayout(d)
        vbox.addWidget(QLabel(_('Server hosting your email account')))
grid = QGridLayout()
vbox.addLayout(grid)
grid.addWidget(QLabel('Server (IMAP)'), 0, 0)
server_e = QLineEdit()
server_e.setText(self.imap_server)
grid.addWidget(server_e, 0, 1)
grid.addWidget(QLabel('Username'), 1, 0)
username_e = QLineEdit()
username_e.setText(self.username)
grid.addWidget(username_e, 1, 1)
grid.addWidget(QLabel('Password'), 2, 0)
password_e = QLineEdit()
password_e.setText(self.password)
grid.addWidget(password_e, 2, 1)
vbox.addStretch()
vbox.addLayout(Buttons(CloseButton(d), OkButton(d)))
if not d.exec_():
return
server = str(server_e.text())
self.config.set_key('email_server', server)
username = str(username_e.text())
self.config.set_key('email_username', username)
password = str(password_e.text())
self.config.set_key('email_password', password)
| [
[
[
1170,
1174
],
[
3034,
3038
]
],
[
[
1182,
1191
],
[
1858,
1867
],
[
1981,
1990
]
],
[
[
1199,
1205
],
[
2785,
2791
]
],
[
[
1228,
1235
],
[
6076,
6083
]
],
[
[
1244,
1251
],
[
3556,
3563
]
],
[
[
1259,
1266
],
[
2891,
2898
]
],
[
[
1274,
1279
],
[
2436,
2441
]
],
[
[
1313,
1326
],
[
3187,
3200
]
],
[
[
1355,
1363
],
[
3317,
3325
]
],
[
[
1391,
1404
],
[
3414,
3427
]
],
[
[
1430,
1431
]
],
[
[
1457,
1458
],
[
3756,
3763
],
[
3797,
3807
]
],
[
[
1466,
1486
],
[
5481,
5486
]
],
[
[
1516,
1527
],
[
6265,
6276
]
],
[
[
1529,
1535
],
[
6303,
6309
],
[
6431,
6437
],
[
6599,
6605
],
[
6765,
6771
]
],
[
[
1537,
1548
],
[
6365,
6376
]
],
[
[
1550,
1559
],
[
6481,
6490
],
[
6646,
6655
],
[
6812,
6821
]
],
[
[
1596,
1606
],
[
3825,
3835
],
[
4082,
4092
]
],
[
[
1608,
1612
],
[
4922,
4926
]
],
[
[
1654,
1668
],
[
4735,
4749
]
],
[
[
1700,
1701
],
[
3929,
3930
],
[
5032,
5033
],
[
5919,
5920
],
[
6061,
6062
],
[
6193,
6194
],
[
6310,
6311
]
],
[
[
1740,
1751
],
[
6049,
6060
]
],
[
[
1753,
1760
],
[
6957,
6964
]
],
[
[
1762,
1773
],
[
6965,
6976
]
],
[
[
1812,
1820
],
[
6981,
6989
]
],
[
[
1822,
1839
],
[
6167,
6184
]
],
[
[
1848,
1857
],
[
4411,
4420
]
],
[
[
3737,
3755
],
[
4540,
4558
]
],
[
[
3818,
3824
]
]
] |
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: MIT
from __future__ import absolute_import
from mock import patch
import pytest
from module_build_service import app
from module_build_service.common import models
from module_build_service.common.models import BUILD_STATES, ModuleBuild
from module_build_service.manage import manager_wrapper, retire
from module_build_service.scheduler.db_session import db_session
from module_build_service.web.utils import deps_to_dict
from tests import clean_database, staged_data_filename
@pytest.mark.usefixtures("model_tests_init_data")
class TestMBSManage:
@pytest.mark.parametrize(
("identifier", "is_valid"),
(
("", False),
("spam", False),
("spam:bacon", True),
("spam:bacon:eggs", True),
("spam:bacon:eggs:ham", True),
("spam:bacon:eggs:ham:sausage", False),
),
)
def test_retire_identifier_validation(self, identifier, is_valid):
if is_valid:
retire(identifier)
else:
with pytest.raises(ValueError):
retire(identifier)
@pytest.mark.parametrize(
("overrides", "identifier", "changed_count"),
(
({"name": "pickme"}, "pickme:eggs", 1),
({"stream": "pickme"}, "spam:pickme", 1),
({"version": "pickme"}, "spam:eggs:pickme", 1),
({"context": "pickme"}, "spam:eggs:ham:pickme", 1),
({}, "spam:eggs", 3),
({"version": "pickme"}, "spam:eggs", 3),
({"context": "pickme"}, "spam:eggs:ham", 3),
),
)
@patch("module_build_service.manage.prompt_bool")
def test_retire_build(self, prompt_bool, overrides, identifier, changed_count):
prompt_bool.return_value = True
module_builds = (
db_session.query(ModuleBuild)
.filter_by(state=BUILD_STATES["ready"])
.order_by(ModuleBuild.id.desc())
.all()
)
# Verify our assumption of the amount of ModuleBuilds in database
assert len(module_builds) == 3
for x, build in enumerate(module_builds):
build.name = "spam"
build.stream = "eggs"
build.version = "ham"
build.context = str(x)
for attr, value in overrides.items():
setattr(module_builds[0], attr, value)
db_session.commit()
retire(identifier)
retired_module_builds = (
db_session.query(ModuleBuild)
.filter_by(state=BUILD_STATES["garbage"])
.order_by(ModuleBuild.id.desc())
.all()
)
assert len(retired_module_builds) == changed_count
for x in range(changed_count):
assert retired_module_builds[x].id == module_builds[x].id
assert retired_module_builds[x].state == BUILD_STATES["garbage"]
@pytest.mark.parametrize(
("confirm_prompt", "confirm_arg", "confirm_expected"),
(
(True, False, True),
(True, True, True),
(False, False, False),
(False, True, True)
),
)
@patch("module_build_service.manage.prompt_bool")
def test_retire_build_confirm_prompt(
self, prompt_bool, confirm_prompt, confirm_arg, confirm_expected
):
prompt_bool.return_value = confirm_prompt
module_builds = db_session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all()
# Verify our assumption of the amount of ModuleBuilds in database
assert len(module_builds) == 3
for x, build in enumerate(module_builds):
build.name = "spam" + str(x) if x > 0 else "spam"
build.stream = "eggs"
db_session.commit()
retire("spam:eggs", confirm_arg)
retired_module_builds = (
db_session.query(ModuleBuild).filter_by(state=BUILD_STATES["garbage"]).all()
)
expected_changed_count = 1 if confirm_expected else 0
assert len(retired_module_builds) == expected_changed_count
class TestCommandBuildModuleLocally:
"""Test mbs-manager subcommand build_module_locally"""
def setup_method(self, test_method):
clean_database()
# Do not allow flask_script exits by itself because we have to assert
# something after the command finishes.
self.sys_exit_patcher = patch("sys.exit")
self.mock_sys_exit = self.sys_exit_patcher.start()
# The consumer is not required to run actually, so it does not make
# sense to publish message after creating a module build.
self.publish_patcher = patch("module_build_service.common.messaging.publish")
self.mock_publish = self.publish_patcher.start()
# Don't allow conf.set_item call to modify conf actually inside command
self.set_item_patcher = patch("module_build_service.manage.conf.set_item")
self.mock_set_item = self.set_item_patcher.start()
# Avoid to create the local sqlite database for the command, which is
# useless for running tests here.
self.create_all_patcher = patch("module_build_service.manage.db.create_all")
self.mock_create_all = self.create_all_patcher.start()
def teardown_method(self, test_method):
self.create_all_patcher.stop()
self.mock_set_item.stop()
self.publish_patcher.stop()
self.sys_exit_patcher.stop()
def _run_manager_wrapper(self, cli_cmd):
# build_module_locally changes database uri to a local SQLite database file.
# Restore the uri to original one in order to not impact the database
# session in subsequent tests.
original_db_uri = app.config["SQLALCHEMY_DATABASE_URI"]
try:
with patch("sys.argv", new=cli_cmd):
manager_wrapper()
finally:
app.config["SQLALCHEMY_DATABASE_URI"] = original_db_uri
@patch("module_build_service.scheduler.local.main")
def test_set_stream(self, main):
cli_cmd = [
"mbs-manager", "build_module_locally",
"--set-stream", "platform:f28",
"--file", staged_data_filename("testmodule-local-build.yaml")
]
self._run_manager_wrapper(cli_cmd)
# Since module_build_service.scheduler.local.main is mocked, MBS does
# not really build the testmodule for this test. Following lines assert
# the fact:
# Module testmodule-local-build is expanded and stored into database,
# and this build has buildrequires platform:f28 and requires
# platform:f28.
# Please note that, the f28 is specified from command line option
# --set-stream, which is the point this test tests.
builds = db_session.query(models.ModuleBuild).filter_by(
name="testmodule-local-build").all()
assert 1 == len(builds)
testmodule_build = builds[0]
mmd_deps = testmodule_build.mmd().get_dependencies()
deps_dict = deps_to_dict(mmd_deps[0], "buildtime")
assert ["f28"] == deps_dict["platform"]
deps_dict = deps_to_dict(mmd_deps[0], "runtime")
assert ["f28"] == deps_dict["platform"]
@patch("module_build_service.manage.logging")
def test_ambiguous_stream(self, logging):
cli_cmd = [
"mbs-manager", "build_module_locally",
"--file", staged_data_filename("testmodule-local-build.yaml")
]
self._run_manager_wrapper(cli_cmd)
args, _ = logging.error.call_args_list[0]
assert "There are multiple streams to choose from for module platform." == args[0]
args, _ = logging.error.call_args_list[1]
assert "Use '-s module_name:module_stream' to choose the stream" == args[0]
def test_module_build_failed(self):
cli_cmd = [
"mbs-manager", "build_module_locally",
"--set-stream", "platform:f28",
"--file", staged_data_filename("testmodule-local-build.yaml")
]
def main_side_effect(module_build_ids):
build = db_session.query(models.ModuleBuild).filter(
models.ModuleBuild.name == "testmodule-local-build"
).first()
build.state = models.BUILD_STATES["failed"]
db_session.commit()
# We don't run consumer actually, but it could be patched to mark some
# module build failed for test purpose.
with patch("module_build_service.scheduler.local.main",
side_effect=main_side_effect):
with pytest.raises(RuntimeError, match="Module build failed"):
self._run_manager_wrapper(cli_cmd)
| [
[
[
78,
93
]
],
[
[
112,
117
],
[
1626,
1631
],
[
3156,
3161
],
[
5941,
5946
],
[
7221,
7226
],
[
4396,
4401
],
[
4647,
4652
],
[
4872,
4877
],
[
5137,
5142
],
[
5784,
5789
],
[
8461,
8466
]
],
[
[
125,
131
],
[
533,
539
],
[
609,
615
],
[
1141,
1147
],
[
2904,
2910
],
[
1073,
1079
],
[
8579,
8585
]
],
[
[
166,
169
],
[
5716,
5719
],
[
5879,
5882
]
],
[
[
210,
216
],
[
6791,
6797
],
[
8113,
8119
],
[
8157,
8163
],
[
8257,
8263
]
],
[
[
264,
276
],
[
1897,
1909
],
[
2553,
2565
],
[
2874,
2886
],
[
3448,
3460
],
[
3900,
3912
]
],
[
[
278,
289
],
[
1855,
1866
],
[
1942,
1953
],
[
2511,
2522
],
[
2600,
2611
],
[
3419,
3430
],
[
3871,
3882
]
],
[
[
330,
345
],
[
5832,
5847
]
],
[
[
347,
353
],
[
1023,
1029
],
[
1116,
1122
],
[
2429,
2435
],
[
3775,
3781
]
],
[
[
408,
418
],
[
1838,
1848
],
[
2400,
2410
],
[
2494,
2504
],
[
3402,
3412
],
[
3746,
3756
],
[
3854,
3864
],
[
6774,
6784
],
[
8096,
8106
],
[
8299,
8309
]
],
[
[
462,
474
],
[
7023,
7035
],
[
7130,
7142
]
],
[
[
493,
507
],
[
4220,
4234
]
],
[
[
509,
529
],
[
6166,
6186
],
[
7405,
7425
],
[
7965,
7985
]
],
[
[
588,
601
]
],
[
[
4080,
4109
]
]
] |
# -*- test-case-name: twisted.test.test_factories -*-
#
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
"""Standard implementations of Twisted protocol-related interfaces.
Start here if you are looking to write a new protocol implementation for
Twisted. The Protocol class contains some introductory material.
API Stability: stable, other than ClientCreator.
Maintainer: U{Itamar Shtull-Trauring<mailto:twisted@itamarst.org>}
"""
import random
from zope.interface import implements
# Twisted Imports
from twisted.python import log, failure, components
from twisted.internet import interfaces, error, defer
class Factory:
"""This is a factory which produces protocols.
By default, buildProtocol will create a protocol of the class given in
self.protocol.
"""
implements(interfaces.IProtocolFactory)
# put a subclass of Protocol here:
protocol = None
numPorts = 0
noisy = True
def doStart(self):
"""Make sure startFactory is called.
Users should not call this function themselves!
"""
if not self.numPorts:
if self.noisy:
log.msg("Starting factory %r" % self)
self.startFactory()
self.numPorts = self.numPorts + 1
def doStop(self):
"""Make sure stopFactory is called.
Users should not call this function themselves!
"""
if self.numPorts == 0:
# this shouldn't happen, but does sometimes and this is better
# than blowing up in assert as we did previously.
return
self.numPorts = self.numPorts - 1
if not self.numPorts:
if self.noisy:
log.msg("Stopping factory %r" % self)
self.stopFactory()
def startFactory(self):
"""This will be called before I begin listening on a Port or Connector.
It will only be called once, even if the factory is connected
to multiple ports.
This can be used to perform 'unserialization' tasks that
are best put off until things are actually running, such
as connecting to a database, opening files, etcetera.
"""
def stopFactory(self):
"""This will be called before I stop listening on all Ports/Connectors.
This can be overridden to perform 'shutdown' tasks such as disconnecting
database connections, closing files, etc.
It will be called, for example, before an application shuts down,
if it was connected to a port. User code should not call this function
directly.
"""
def buildProtocol(self, addr):
"""Create an instance of a subclass of Protocol.
The returned instance will handle input on an incoming server
connection, and an attribute \"factory\" pointing to the creating
factory.
Override this method to alter how Protocol instances get created.
@param addr: an object implementing L{twisted.internet.interfaces.IAddress}
"""
p = self.protocol()
p.factory = self
return p
class ClientFactory(Factory):
"""A Protocol factory for clients.
This can be used together with the various connectXXX methods in
reactors.
"""
def startedConnecting(self, connector):
"""Called when a connection has been started.
You can call connector.stopConnecting() to stop the connection attempt.
@param connector: a Connector object.
"""
def clientConnectionFailed(self, connector, reason):
"""Called when a connection has failed to connect.
It may be useful to call connector.connect() - this will reconnect.
@type reason: L{twisted.python.failure.Failure}
"""
def clientConnectionLost(self, connector, reason):
"""Called when an established connection is lost.
It may be useful to call connector.connect() - this will reconnect.
@type reason: L{twisted.python.failure.Failure}
"""
class _InstanceFactory(ClientFactory):
"""Factory used by ClientCreator."""
noisy = False
def __init__(self, reactor, instance, deferred):
self.reactor = reactor
self.instance = instance
self.deferred = deferred
def __repr__(self):
return "<ClientCreator factory: %r>" % (self.instance, )
def buildProtocol(self, addr):
self.reactor.callLater(0, self.deferred.callback, self.instance)
del self.deferred
return self.instance
def clientConnectionFailed(self, connector, reason):
self.reactor.callLater(0, self.deferred.errback, reason)
del self.deferred
class ClientCreator:
"""Client connections that do not require a factory.
The various connect* methods create a protocol instance using the given
protocol class and arguments, and connect it, returning a Deferred of the
resulting protocol instance.
Useful for cases when we don't really need a factory. Mainly this
is when there is no shared state between protocol instances, and no need
to reconnect.
"""
def __init__(self, reactor, protocolClass, *args, **kwargs):
self.reactor = reactor
self.protocolClass = protocolClass
self.args = args
self.kwargs = kwargs
def connectTCP(self, host, port, timeout=30, bindAddress=None):
"""Connect to remote host, return Deferred of resulting protocol instance."""
d = defer.Deferred()
f = _InstanceFactory(self.reactor, self.protocolClass(*self.args, **self.kwargs), d)
self.reactor.connectTCP(host, port, f, timeout=timeout, bindAddress=bindAddress)
return d
def connectUNIX(self, address, timeout = 30, checkPID=0):
"""Connect to Unix socket, return Deferred of resulting protocol instance."""
d = defer.Deferred()
f = _InstanceFactory(self.reactor, self.protocolClass(*self.args, **self.kwargs), d)
self.reactor.connectUNIX(address, f, timeout = timeout, checkPID=checkPID)
return d
def connectSSL(self, host, port, contextFactory, timeout=30, bindAddress=None):
"""Connect to SSL server, return Deferred of resulting protocol instance."""
d = defer.Deferred()
f = _InstanceFactory(self.reactor, self.protocolClass(*self.args, **self.kwargs), d)
self.reactor.connectSSL(host, port, f, contextFactory, timeout=timeout, bindAddress=bindAddress)
return d
class ReconnectingClientFactory(ClientFactory):
"""My clients auto-reconnect with an exponential back-off.
Note that clients should call my resetDelay method after they have
connected successfully.
@ivar maxDelay: Maximum number of seconds between connection attempts.
@ivar initialDelay: Delay for the first reconnection attempt.
    @ivar factor: a multiplicative factor by which the delay grows
@ivar jitter: percentage of randomness to introduce into the delay length
to prevent stampeding.
"""
maxDelay = 3600
initialDelay = 1.0
# Note: These highly sensitive factors have been precisely measured by
# the National Institute of Science and Technology. Take extreme care
# in altering them, or you may damage your Internet!
factor = 2.7182818284590451 # (math.e)
# Phi = 1.6180339887498948 # (Phi is acceptable for use as a
# factor if e is too large for your application.)
    jitter = 0.11962656492 # molar Planck constant times c, joule meter/mole
delay = initialDelay
retries = 0
maxRetries = None
_callID = None
connector = None
continueTrying = 1
def clientConnectionFailed(self, connector, reason):
if self.continueTrying:
self.connector = connector
self.retry()
def clientConnectionLost(self, connector, unused_reason):
if self.continueTrying:
self.connector = connector
self.retry()
def retry(self, connector=None):
"""Have this connector connect again, after a suitable delay.
"""
if not self.continueTrying:
if self.noisy:
log.msg("Abandoning %s on explicit request" % (connector,))
return
if connector is None:
if self.connector is None:
raise ValueError("no connector to retry")
else:
connector = self.connector
self.retries += 1
if self.maxRetries is not None and (self.retries > self.maxRetries):
if self.noisy:
log.msg("Abandoning %s after %d retries." %
(connector, self.retries))
return
self.delay = min(self.delay * self.factor, self.maxDelay)
if self.jitter:
self.delay = random.normalvariate(self.delay,
self.delay * self.jitter)
if self.noisy:
log.msg("%s will retry in %d seconds" % (connector, self.delay,))
from twisted.internet import reactor
def reconnector():
self._callID = None
connector.connect()
self._callID = reactor.callLater(self.delay, reconnector)
def stopTrying(self):
"""I put a stop to any attempt to reconnect in progress.
"""
# ??? Is this function really stopFactory?
if self._callID:
self._callID.cancel()
self._callID = None
if self.connector:
# Hopefully this doesn't just make clientConnectionFailed
# retry again.
try:
self.connector.stopConnecting()
except error.NotConnectingError:
pass
self.continueTrying = 0
def resetDelay(self):
"""Call me after a successful connection to reset.
I reset the delay and the retry counter.
"""
self.delay = self.initialDelay
self.retries = 0
self._callID = None
self.continueTrying = 1
class ServerFactory(Factory):
"""Subclass this to indicate that your protocol.Factory is only usable for servers.
"""
class BaseProtocol:
"""This is the abstract superclass of all protocols.
If you are going to write a new protocol for Twisted, start here. The
docstrings of this class explain how you can get started. Any protocol
implementation, either client or server, should be a subclass of me.
My API is quite simple. Implement dataReceived(data) to handle both
event-based and synchronous input; output can be sent through the
'transport' attribute, which is to be an instance that implements
L{twisted.internet.interfaces.ITransport}.
Some subclasses exist already to help you write common types of protocols:
see the L{twisted.protocols.basic} module for a few of them.
"""
connected = 0
transport = None
def makeConnection(self, transport):
"""Make a connection to a transport and a server.
This sets the 'transport' attribute of this Protocol, and calls the
connectionMade() callback.
"""
self.connected = 1
self.transport = transport
self.connectionMade()
def connectionMade(self):
"""Called when a connection is made.
This may be considered the initializer of the protocol, because
it is called when the connection is completed. For clients,
this is called once the connection to the server has been
established; for servers, this is called after an accept() call
stops blocking and a socket has been received. If you need to
send any greeting or initial message, do it here.
"""
connectionDone=failure.Failure(error.ConnectionDone())
connectionDone.cleanFailure()
class Protocol(BaseProtocol):
implements(interfaces.IProtocol)
def dataReceived(self, data):
"""Called whenever data is received.
Use this method to translate to a higher-level message. Usually, some
callback will be made upon the receipt of each complete protocol
message.
@param data: a string of indeterminate length. Please keep in mind
that you will probably need to buffer some data, as partial
(or multiple) protocol messages may be received! I recommend
that unit tests for protocols call through to this method with
differing chunk sizes, down to one byte at a time.
"""
def connectionLost(self, reason=connectionDone):
"""Called when the connection is shut down.
Clear any circular references here, and any external references
to this Protocol. The connection has been closed.
@type reason: L{twisted.python.failure.Failure}
"""
class ProtocolToConsumerAdapter(components.Adapter):
"""
This class is unstable.
"""
implements(interfaces.IConsumer)
def write(self, data):
self.original.dataReceived(data)
def registerProducer(self, producer, streaming):
pass
def unregisterProducer(self):
pass
components.registerAdapter(ProtocolToConsumerAdapter, interfaces.IProtocol,
interfaces.IConsumer)
class ConsumerToProtocolAdapter(components.Adapter):
"""
This class is unstable.
"""
implements(interfaces.IProtocol)
def dataReceived(self, data):
self.original.write(data)
def connectionLost(self, reason):
pass
def makeConnection(self, transport):
pass
def connectionMade(self):
pass
components.registerAdapter(ConsumerToProtocolAdapter, interfaces.IConsumer,
interfaces.IProtocol)
class ProcessProtocol(BaseProtocol):
"""Processes have some additional methods besides receiving data.
"""
def childDataReceived(self, childFD, data):
if childFD == 1:
self.outReceived(data)
elif childFD == 2:
self.errReceived(data)
def outReceived(self, data):
"""Some data was received from stdout."""
def errReceived(self, data):
"""Some data was received from stderr."""
def childConnectionLost(self, childFD):
if childFD == 0:
self.inConnectionLost()
elif childFD == 1:
self.outConnectionLost()
elif childFD == 2:
self.errConnectionLost()
def inConnectionLost(self):
"""This will be called when stdin is closed."""
def outConnectionLost(self):
"""This will be called when stdout is closed."""
def errConnectionLost(self):
"""This will be called when stderr is closed."""
def processEnded(self, reason):
"""This will be called when the subprocess is finished.
@type reason: L{twisted.python.failure.Failure}
"""
class AbstractDatagramProtocol:
"""Abstract protocol for datagram-oriented transports, e.g. IP, ICMP, ARP, UDP."""
transport = None
numPorts = 0
noisy = True
def __getstate__(self):
d = self.__dict__.copy()
d['transport'] = None
return d
def doStart(self):
"""Make sure startProtocol is called.
This will be called by makeConnection(), users should not call it.
"""
if not self.numPorts:
if self.noisy:
log.msg("Starting protocol %s" % self)
self.startProtocol()
self.numPorts = self.numPorts + 1
def doStop(self):
"""Make sure stopProtocol is called.
This will be called by the port, users should not call it.
"""
assert self.numPorts > 0
self.numPorts = self.numPorts - 1
self.transport = None
if not self.numPorts:
if self.noisy:
log.msg("Stopping protocol %s" % self)
self.stopProtocol()
def startProtocol(self):
"""Called when a transport is connected to this protocol.
Will only be called once, even if multiple ports are connected.
"""
def stopProtocol(self):
"""Called when the transport is disconnected.
Will only be called once, after all ports are disconnected.
"""
def makeConnection(self, transport):
"""Make a connection to a transport and a server.
This sets the 'transport' attribute of this DatagramProtocol, and calls the
doStart() callback.
"""
assert self.transport == None
self.transport = transport
self.doStart()
def datagramReceived(self, datagram, addr):
"""Called when a datagram is received.
@param datagram: the string received from the transport.
@param addr: tuple of source of datagram.
"""
class DatagramProtocol(AbstractDatagramProtocol):
"""Protocol for datagram-oriented transport, e.g. UDP."""
def connectionRefused(self):
"""Called due to error from write in connected mode.
Note this is a result of ICMP message generated by *previous*
write.
"""
class ConnectedDatagramProtocol(DatagramProtocol):
"""Protocol for connected datagram-oriented transport.
No longer necessary for UDP.
"""
def datagramReceived(self, datagram):
"""Called when a datagram is received.
@param datagram: the string received from the transport.
"""
def connectionFailed(self, failure):
"""Called if connecting failed.
Usually this will be due to a DNS lookup failure.
"""
class FileWrapper:
"""A wrapper around a file-like object to make it behave as a Transport.
This doesn't actually stream the file to the attached protocol,
and is thus useful mainly as a utility for debugging protocols.
"""
implements(interfaces.ITransport)
closed = 0
disconnecting = 0
producer = None
streamingProducer = 0
def __init__(self, file):
self.file = file
def write(self, data):
try:
self.file.write(data)
except:
self.handleException()
# self._checkProducer()
def _checkProducer(self):
# Cheating; this is called at "idle" times to allow producers to be
# found and dealt with
if self.producer:
self.producer.resumeProducing()
def registerProducer(self, producer, streaming):
"""From abstract.FileDescriptor
"""
self.producer = producer
self.streamingProducer = streaming
if not streaming:
producer.resumeProducing()
def unregisterProducer(self):
self.producer = None
def stopConsuming(self):
self.unregisterProducer()
self.loseConnection()
def writeSequence(self, iovec):
self.write("".join(iovec))
def loseConnection(self):
self.closed = 1
try:
self.file.close()
except (IOError, OSError):
self.handleException()
def getPeer(self):
# XXX: According to ITransport, this should return an IAddress!
return 'file', 'file'
def getHost(self):
# XXX: According to ITransport, this should return an IAddress!
return 'file'
def handleException(self):
pass
def resumeProducing(self):
# Never sends data anyways
pass
def pauseProducing(self):
# Never sends data anyways
pass
def stopProducing(self):
self.loseConnection()
__all__ = ["Factory", "ClientFactory", "ReconnectingClientFactory", "connectionDone",
"Protocol", "ProcessProtocol", "FileWrapper", "ServerFactory",
"AbstractDatagramProtocol", "DatagramProtocol", "ConnectedDatagramProtocol",
"ClientCreator"]
| [
[
[
478,
484
],
[
8835,
8841
]
],
[
[
512,
522
],
[
824,
834
],
[
11872,
11882
],
[
12938,
12948
],
[
13383,
13393
],
[
17838,
17848
]
],
[
[
569,
572
],
[
1170,
1173
],
[
1720,
1723
],
[
8190,
8193
],
[
8605,
8608
],
[
8976,
8979
],
[
15406,
15409
],
[
15846,
15849
]
],
[
[
574,
581
],
[
11765,
11772
]
],
[
[
583,
593
],
[
12869,
12879
],
[
13156,
13166
],
[
13314,
13324
],
[
13637,
13647
]
],
[
[
623,
633
],
[
835,
845
],
[
11883,
11893
],
[
12949,
12959
],
[
13210,
13220
],
[
13259,
13269
],
[
13394,
13404
],
[
13691,
13701
],
[
13740,
13750
],
[
17849,
17859
]
],
[
[
635,
640
],
[
11781,
11786
],
[
9699,
9704
]
],
[
[
642,
647
],
[
5518,
5523
],
[
5895,
5900
],
[
6291,
6296
]
],
[
[
656,
663
],
[
3144,
3151
],
[
10072,
10079
]
],
[
[
3130,
3143
],
[
4072,
4085
],
[
6557,
6570
]
],
[
[
4055,
4071
],
[
5547,
5563
],
[
5924,
5940
],
[
6320,
6336
]
],
[
[
4718,
4731
]
],
[
[
6531,
6556
]
],
[
[
10058,
10071
]
],
[
[
10186,
10198
],
[
11852,
11864
],
[
13785,
13797
]
],
[
[
11750,
11764
],
[
11805,
11819
],
[
12565,
12579
]
],
[
[
11843,
11851
]
],
[
[
12843,
12868
],
[
13183,
13208
]
],
[
[
13288,
13313
],
[
13664,
13689
]
],
[
[
13769,
13784
]
],
[
[
14897,
14921
],
[
16832,
16856
]
],
[
[
16815,
16831
],
[
17148,
17164
]
],
[
[
17122,
17147
]
],
[
[
17598,
17609
]
],
[
[
19545,
19552
]
]
] |
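The ReconnectingClientFactory in the sample above grows its delay as min(delay * factor, maxDelay) and, when jitter is non-zero, draws the actual wait from random.normalvariate(delay, delay * jitter). Below is a standalone sketch of that schedule, for illustration only; the function name backoff_delays is not part of the sample, and its defaults simply mirror the class constants shown above.
# Illustrative sketch of the reconnect back-off schedule described above.
import random

def backoff_delays(attempts, initial_delay=1.0, factor=2.7182818284590451,
                   max_delay=3600, jitter=0.11962656492):
    """Yield the delay applied before each successive reconnection attempt."""
    delay = initial_delay
    for _ in range(attempts):
        delay = min(delay * factor, max_delay)
        if jitter:
            # Spread the wait a little so many clients do not retry in lockstep.
            yield random.normalvariate(delay, delay * jitter)
        else:
            yield delay

# With jitter disabled the first delays are roughly 2.72, 7.39, 20.1, ..., capped at 3600.
print(list(backoff_delays(5, jitter=0)))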
# -*- coding: utf-8 -*-
"""
AsciiDoc Reader
===============
This plugin allows you to use AsciiDoc to write your posts.
File extension should be ``.asc``, ``.adoc``, or ``.asciidoc``.
"""
from pelican.readers import BaseReader
from pelican.utils import pelican_open
from pelican import signals
import six
try:
# asciidocapi won't import on Py3
from .asciidocapi import AsciiDocAPI, AsciiDocError
# AsciiDocAPI class checks for asciidoc.py
AsciiDocAPI()
except:
asciidoc_enabled = False
else:
asciidoc_enabled = True
class AsciiDocReader(BaseReader):
"""Reader for AsciiDoc files"""
enabled = asciidoc_enabled
file_extensions = ['asc', 'adoc', 'asciidoc']
default_options = ["--no-header-footer", "-a newline=\\n"]
default_backend = 'html5'
def read(self, source_path):
"""Parse content and metadata of asciidoc files"""
from cStringIO import StringIO
with pelican_open(source_path) as source:
text = StringIO(source.encode('utf8'))
content = StringIO()
ad = AsciiDocAPI()
options = self.settings.get('ASCIIDOC_OPTIONS', [])
options = self.default_options + options
for o in options:
ad.options(*o.split())
backend = self.settings.get('ASCIIDOC_BACKEND', self.default_backend)
ad.execute(text, content, backend=backend)
content = content.getvalue().decode('utf8')
metadata = {}
for name, value in ad.asciidoc.document.attributes.items():
name = name.lower()
metadata[name] = self.process_metadata(name, six.text_type(value))
if 'doctitle' in metadata:
metadata['title'] = metadata['doctitle']
return content, metadata
def add_reader(readers):
for ext in AsciiDocReader.file_extensions:
readers.reader_classes[ext] = AsciiDocReader
def register():
signals.readers_init.connect(add_reader)
| [
[
[
218,
228
],
[
567,
577
]
],
[
[
255,
267
],
[
936,
948
]
],
[
[
288,
295
],
[
1903,
1910
]
],
[
[
303,
306
],
[
1613,
1616
]
],
[
[
380,
391
],
[
458,
469
],
[
1066,
1077
]
],
[
[
393,
406
]
],
[
[
519,
535
],
[
631,
647
]
],
[
[
484,
500
],
[
631,
647
]
],
[
[
552,
566
],
[
1797,
1811
],
[
1867,
1881
]
],
[
[
1761,
1771
],
[
1932,
1942
]
],
[
[
1887,
1895
]
]
] |
# Generated by Django 2.2.4 on 2019-08-13 12:04
from django.db import migrations
import mdeditor.fields
class Migration(migrations.Migration):
dependencies = [
('blog', '0003_post_clicks'),
]
operations = [
migrations.RemoveField(
model_name='post',
name='excerpt',
),
migrations.AlterField(
model_name='post',
name='body',
field=mdeditor.fields.MDTextField(),
),
]
| [
[
[
71,
81
],
[
123,
133
],
[
240,
250
],
[
342,
352
]
],
[
[
89,
104
],
[
439,
447
]
],
[
[
113,
122
]
]
] |
from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
from drfpasswordless.views import (ObtainEmailCallbackToken,
ObtainMobileCallbackToken,
ObtainAuthTokenFromCallbackToken,
VerifyAliasFromCallbackToken,
ObtainEmailVerificationCallbackToken,
ObtainMobileVerificationCallbackToken, )
urlpatterns = [url(r'^callback/auth/$', ObtainAuthTokenFromCallbackToken.as_view(), name='auth_callback'),
url(r'^auth/email/$', ObtainEmailCallbackToken.as_view(), name='auth_email'),
url(r'^auth/mobile/$', ObtainMobileCallbackToken.as_view(), name='auth_mobile'),
url(r'^callback/verify/$', VerifyAliasFromCallbackToken.as_view(), name='verify_callback'),
url(r'^verify/email/$', ObtainEmailVerificationCallbackToken.as_view(), name='verify_email'),
url(r'^verify/mobile/$', ObtainMobileVerificationCallbackToken.as_view(), name='verify_mobile')]
format_suffix_patterns(urlpatterns)
| [
[
[
29,
32
],
[
517,
520
],
[
624,
627
],
[
717,
720
],
[
813,
816
],
[
920,
923
],
[
1029,
1032
]
],
[
[
72,
94
],
[
1127,
1149
]
],
[
[
130,
154
],
[
646,
670
]
],
[
[
191,
216
],
[
740,
765
]
],
[
[
253,
285
],
[
542,
574
]
],
[
[
322,
350
],
[
840,
868
]
],
[
[
387,
423
],
[
944,
980
]
],
[
[
460,
497
],
[
1054,
1091
]
],
[
[
502,
513
],
[
1150,
1161
]
]
] |
import distutils.sysconfig
import os
import platform
import re
import sys
def get_python_relative_libdir():
"""Returns the appropropriate python libdir relative to the build directory.
@param exe_path the path to the lldb executable
@return the python path that needs to be added to sys.path (PYTHONPATH)
in order to find the lldb python module.
"""
if platform.system() != 'Linux':
return None
# We currently have a bug in lldb -P that does not account for
# architecture variants in python paths for
# architecture-specific modules. Handle the lookup here.
# When that bug is fixed, we should just ask lldb for the
# right answer always.
arch_specific_libdir = distutils.sysconfig.get_python_lib(True, False)
split_libdir = arch_specific_libdir.split(os.sep)
lib_re = re.compile(r"^lib.+$")
for i in range(len(split_libdir)):
match = lib_re.match(split_libdir[i])
if match is not None:
# We'll call this the relative root of the lib dir.
# Things like RHEL will have an arch-specific python
# lib dir, which isn't 'lib' on x86_64.
return os.sep.join(split_libdir[i:])
# Didn't resolve it.
return None
if __name__ == '__main__':
lib_dir = get_python_relative_libdir()
if lib_dir is not None:
sys.stdout.write(lib_dir)
sys.exit(0)
else:
sys.exit(1)
| [
[
[
7,
26
],
[
725,
734
]
],
[
[
34,
36
],
[
819,
821
],
[
1179,
1181
]
],
[
[
44,
52
],
[
381,
389
]
],
[
[
60,
62
],
[
840,
842
]
],
[
[
70,
73
],
[
1357,
1360
],
[
1391,
1394
],
[
1421,
1424
]
],
[
[
80,
106
],
[
1292,
1318
]
],
[
[
1282,
1289
],
[
1328,
1335
],
[
1374,
1381
]
]
] |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import random
import allure
import coreapi
import pytest
from adcm_client.objects import ADCMClient
from adcm_pytest_plugin.utils import get_data_dir
from adcm_pytest_plugin import utils
from jsonschema import validate
# pylint: disable=E0401, W0601, W0611, W0621, W0212
from tests.library import errorcodes as err
from tests.library import steps
from tests.library.utils import get_random_service, get_random_host_prototype
SCHEMAS = os.path.join(os.path.dirname(__file__), "schemas/")
host_bad_configs = (({"str-key": "{1bbb}", "required": "158", "option": "my.host",
"sub": {"sub1": 3}, "credentials": {"sample_string": "test",
"read_only_initiated": 1}},
"should be integer"),
({"str-key": 61, "required": 158, "fkey": 18.3,
"option": "my.host", "sub": {"sub1": 3},
"credentials": {"sample_string": "txt",
"read_only_initiated": {}}},
'should be string'),
({"str-key": "{1bbb}", "required": 158, "fkey": 18.3,
"option": "my.host", "sub": {"sub1": 9}},
'not in option list'),
({"str-key": "{1bbb}", "required": 158, "option": 8080,
"sub": {"sub1": {"foo": "bar"}}},
'should be flat')
)
@pytest.fixture(scope="module")
def hostprovider(sdk_client_ms: ADCMClient):
bundle = sdk_client_ms.upload_from_fs(get_data_dir(__file__, 'hostprovider_bundle'))
return bundle.provider_create(utils.random_string())
@pytest.fixture(scope="module")
def host(sdk_client_ms: ADCMClient, hostprovider):
return hostprovider.host_create(utils.random_string())
@pytest.fixture(scope="module")
def cluster(sdk_client_ms: ADCMClient):
return sdk_client_ms.upload_from_fs(get_data_dir(__file__, 'cluster_bundle'))
@pytest.fixture(scope="module")
def client(sdk_client_ms: ADCMClient, cluster, hostprovider):
return sdk_client_ms.adcm()._api.objects
class TestHost:
"""
Basic tests for host
"""
def test_validate_host_prototype(self, client):
host_prototype = json.loads(json.dumps(client.stack.host.list()[0]))
schema = json.load(
open(SCHEMAS + '/stack_list_item_schema.json')
)
with allure.step('Match prototype with schema'):
assert validate(host_prototype, schema) is None
steps.delete_all_data(client)
def test_create_host(self, sdk_client_fs: ADCMClient):
"""Check that host have same fqdn and status after reread config
"""
bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'hostprovider_bundle'))
hp = bundle.provider_create(utils.random_string())
host = hp.host_create(utils.random_string())
host_status_before = host.status
host_fqdn_before = host.fqdn
with allure.step('Reread host'):
host.reread()
host_status_after = host.status
host_fqdn_after = host.fqdn
with allure.step('Check states and fqdn'):
assert host_fqdn_before == host_fqdn_after
assert host_status_before == host_status_after
def test_shouldnt_create_duplicate_host(self, sdk_client_fs: ADCMClient):
"""We have restriction for create duplicated hosts (wuth the same fqdn).
Scenario:
1. Create hostprovider
2. Create first host
3. Create second host with the same FQDN
4. Check that we've got 409 error for second host creation
"""
bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'hostprovider_simple'))
hp = bundle.provider_create(utils.random_string())
hp.host_create("duplicate")
with pytest.raises(coreapi.exceptions.ErrorMessage) as e:
hp.host_create('duplicate')
with allure.step('Check host conflict'):
err.HOST_CONFLICT.equal(e, 'duplicate host')
def test_shouldnt_create_host_with_unknown_prototype(self, client):
with allure.step('Create provider'):
provider_id = client.provider.create(prototype_id=client.stack.provider.list()[0]['id'],
name=utils.random_string())['id']
with allure.step('Create host'):
with pytest.raises(coreapi.exceptions.ErrorMessage) as e:
client.host.create(prototype_id=random.randint(100, 500),
provider_id=provider_id,
fqdn=utils.random_string())
with allure.step('Check PROTOTYPE_NOT_FOUND error'):
err.PROTOTYPE_NOT_FOUND.equal(e, 'prototype doesn\'t exist')
def test_shouldnt_create_host_wo_prototype(self, client):
with allure.step('Create provider'):
provider = client.provider.create(prototype_id=client.stack.provider.list()[0]['id'],
name=utils.random_string())
with allure.step('Try to create host without prototype'):
with pytest.raises(coreapi.exceptions.ParameterError) as e:
client.host.create(provider_id=provider['id'], fqdn=utils.random_string())
with allure.step('Check prototype_id error'):
assert str(e.value) == "{'prototype_id': 'This parameter is required.'}"
def test_shouldnt_create_host_wo_provider(self, client):
with allure.step('Create prototype'):
proto = get_random_host_prototype(client)
with pytest.raises(coreapi.exceptions.ParameterError) as e:
client.host.create(prototype_id=proto['id'], fqdn=utils.random_string())
with allure.step('Check provider_id error'):
assert str(e.value) == "{'provider_id': 'This parameter is required.'}"
def test_create_host_with_max_length_plus_1(self, sdk_client_fs: ADCMClient):
"""We cannot create host with name more then max length
"""
bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'hostprovider_simple'))
hp = bundle.provider_create(utils.random_string())
with pytest.raises(coreapi.exceptions.ErrorMessage) as e:
hp.host_create(utils.random_string(257))
with allure.step('Check LONG_NAME error'):
err.LONG_NAME.equal(e, 'Host name is too long. Max length is 256')
def test_shouldnt_create_host_with_wrong_name(self, sdk_client_fs: ADCMClient):
"""Check that host name cannot contain special characters
"""
bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'hostprovider_simple'))
hp = bundle.provider_create(utils.random_string())
with pytest.raises(coreapi.exceptions.ErrorMessage) as e:
hp.host_create(utils.random_string() + utils.random_special_chars())
with allure.step('Check WRONG_NAME error'):
err.WRONG_NAME.equal(e, 'Host name is incorrect. '
'Only latin characters, digits, dots (.)')
def test_get_host_list(self, sdk_client_fs: ADCMClient):
"""Create multiple hosts and check that all hosts was created
"""
expected_list = set()
actual_list = set()
bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'hostprovider_simple'))
hp = bundle.provider_create(utils.random_string())
for fqdn in utils.random_string_list():
hp.host_create(fqdn)
expected_list.add(fqdn)
for host in sdk_client_fs.host_list():
actual_list.add(host.fqdn)
with allure.step('Check created hosts with the data from the API'):
assert actual_list == expected_list
def test_get_host_info(self, client):
host = steps.create_host_w_default_provider(client, utils.random_string())
actual = steps.read_host(client, host['id'])
with allure.step('Check created host with the data from the API'):
del actual['status']
del host['status']
assert actual == host
def test_delete_host(self, sdk_client_fs: ADCMClient):
"""Check that we can delete host"""
bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'hostprovider_simple'))
hp = bundle.provider_create(utils.random_string())
host = hp.host_create("deletion_host")
with allure.step('delete host'):
deletion_result = host.delete()
with allure.step('Check that host is deleted'):
assert deletion_result is None
def test_should_return_correct_error_when_read_deleted(self, sdk_client_fs: ADCMClient):
"""Check that we have 409 error if host not found"""
bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'hostprovider_simple'))
hp = bundle.provider_create(utils.random_string())
host = hp.host_create(utils.random_string())
with allure.step('delete host'):
host.delete()
with pytest.raises(coreapi.exceptions.ErrorMessage) as e:
host.reread()
with allure.step('Check HOST_NOT_FOUND'):
err.HOST_NOT_FOUND.equal(e)
def test_should_return_correct_error_when_delete_nonexist_host(
self, sdk_client_fs: ADCMClient):
"""If we try to delete deleted host we've got 409 error.
"""
bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'hostprovider_simple'))
hp = bundle.provider_create(utils.random_string())
host = hp.host_create(utils.random_string())
with allure.step('delete host'):
host.delete()
with allure.step('delete host second time'):
with pytest.raises(coreapi.exceptions.ErrorMessage) as e:
host.delete()
with allure.step('Check HOST_NOT_FOUND'):
err.HOST_NOT_FOUND.equal(e, 'host doesn\'t exist')
# *** Basic tests for hostcomponent ***
def test_create_hostcomponent(self, sdk_client_fs: ADCMClient):
"""Check that hostcomponent id the same in component list and for service
"""
bundle = sdk_client_fs.upload_from_fs(get_data_dir(
__file__, 'cluster_service_hostcomponent'))
bundle_hp = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'hostprovider_simple'))
cluster = bundle.cluster_create(utils.random_string())
hp = bundle_hp.provider_create(utils.random_string())
host = hp.host_create(utils.random_string())
cluster.host_add(host)
service = cluster.service_add(name="ZOOKEEPER")
component_list = service.component_list()
component = service.component(name='ZOOKEEPER_CLIENT')
with allure.step('Check component id and name'):
assert component.component_id == component_list[0].component_id
assert component.name == component_list[0].name
def test_get_hostcomponent_list(self, client): # invalid case, random component takes in circle
cluster = steps.create_cluster(client)
service = steps.read_service(client, get_random_service(client)['id'])
cluster_svc = client.cluster.service.create(cluster_id=cluster['id'],
prototype_id=service['id'])
components = client.cluster.service.component.list(cluster_id=cluster['id'],
service_id=cluster_svc['id'])
# create mapping between cluster and hosts, then create hostcomponent on host
hostcomponent_list = []
for fqdn in utils.random_string_list():
host = steps.create_host_w_default_provider(client, fqdn)
steps.add_host_to_cluster(client, host, cluster)
component = random.choice(components)['id']
hostcomponent_list.append({"host_id": host['id'], "service_id": cluster_svc['id'],
"component_id": component})
expected_hostcomponent_list = client.cluster.hostcomponent.create(
cluster_id=cluster['id'], hc=hostcomponent_list)
actual_hs_list = client.cluster.hostcomponent.list(cluster_id=cluster['id'])
with allure.step('Check created data with data from API'):
assert actual_hs_list == expected_hostcomponent_list
class TestHostConfig:
"""Class for test host configuration"""
def test_config_history_url_must_point_to_the_host_config(self, client):
host = steps.create_host_w_default_provider(client, utils.random_string())
config = {"str-key": "{1bbb}", "required": 158, "option": 8080, "sub": {"sub1": 2},
"credentials": {"sample_string": "txt", "read_only_initiated": {}}}
i = 0
with allure.step('Create host history'):
while i < random.randint(0, 10):
client.host.config.history.create(host_id=host['id'],
description=utils.random_string(),
config=config)
i += 1
history = client.host.config.history.list(host_id=host['id'])
with allure.step('Check host history'):
for conf in history:
assert ('host/{0}/config/'.format(host['id']) in conf['url']) is True
steps.delete_all_data(client)
def test_get_default_host_config(self, client):
# Get a default host config and validate it with json schema
host = steps.create_host_w_default_provider(client, utils.random_string())
config_json = {}
with allure.step('Get default configuration from host'):
config = client.host.config.current.list(host_id=host['id'])
if config:
config_json = json.loads(json.dumps(config))
schema = json.load(open(SCHEMAS + '/config_item_schema.json'))
with allure.step('Check config'):
assert validate(config_json, schema) is None
steps.delete_all_data(client)
def test_get_config_from_nonexistant_host(self, sdk_client_fs: ADCMClient):
"""Get configuration for non exist host.
"""
bundle_hp = sdk_client_fs.upload_from_fs(get_data_dir(
__file__, 'hostprovider_simple'))
hp = bundle_hp.provider_create(utils.random_string())
with allure.step('Get host config from a non existant host'):
with pytest.raises(coreapi.exceptions.ErrorMessage) as e:
hp.host(host_id=random.randint(100, 500))
with allure.step('Check error host doesn\'t exist'):
err.HOST_NOT_FOUND.equal(e, 'host doesn\'t exist')
def test_shouldnt_create_host_config_when_config_not_json_string(self, client):
"""Should not create host configuration when config string is not json
"""
host = steps.create_host_w_default_provider(client, utils.random_string())
config = utils.random_string()
with allure.step('Try to create the host config from non-json string'):
with pytest.raises(coreapi.exceptions.ErrorMessage) as e:
client.host.config.history.create(host_id=host['id'], config=config)
with allure.step('Check error config should not be just one string'):
err.JSON_ERROR.equal(e, 'config should not be just one string')
def test_shouldnt_create_host_config_when_config_is_number(self, client):
"""Should not create host configuration when config string is number
"""
host = steps.create_host_w_default_provider(client, utils.random_string())
config = random.randint(100, 999)
with allure.step('Try to create the host configuration with a number'):
with pytest.raises(coreapi.exceptions.ErrorMessage) as e:
client.host.config.history.create(host_id=host['id'], config=config)
with allure.step('Check error should not be just one int or float'):
err.JSON_ERROR.equal(e, 'should not be just one int or float')
@pytest.mark.parametrize(('config', 'error'), host_bad_configs)
def test_change_host_config_negative(self, host, config, error):
"""Check that we have error if try to update host config with bad configuration
:param host: host object
:param config: dict with bad config
:param error: expected error
"""
with allure.step('Try to create config when parameter is not integer'):
with pytest.raises(coreapi.exceptions.ErrorMessage) as e:
host.config_set(config)
with allure.step(f'Check error {error}'):
err.CONFIG_VALUE_ERROR.equal(e, error)
def test_should_create_host_config_when_parameter_is_integer_and_not_float(
self, sdk_client_fs: ADCMClient):
"""Create host config for float parameter with integer
"""
bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'hostprovider_bundle'))
hp = bundle.provider_create(utils.random_string())
host = hp.host_create(utils.random_string())
config = {"str-key": "{1bbb}", "required": 158, "fkey": 18, "option": "my.host",
"sub": {"sub1": 3},
"credentials": {"sample_string": "txt", "read_only_initiated": {}}}
host.config_set(config)
| [
[
[
552,
556
],
[
2833,
2837
],
[
2844,
2848
],
[
2902,
2906
],
[
14443,
14447
],
[
14454,
14458
],
[
14491,
14495
]
],
[
[
564,
566
],
[
1006,
1008
],
[
1019,
1021
]
],
[
[
574,
580
],
[
5108,
5114
],
[
12446,
12452
],
[
13485,
13491
],
[
15167,
15173
],
[
16272,
16278
]
],
[
[
589,
595
],
[
2995,
3001
],
[
3578,
3584
],
[
3729,
3735
],
[
4554,
4560
],
[
4733,
4739
],
[
4962,
4968
],
[
5270,
5276
],
[
5467,
5473
],
[
5684,
5690
],
[
5913,
5919
],
[
6114,
6120
],
[
6375,
6381
],
[
6942,
6948
],
[
7535,
7541
],
[
8286,
8292
],
[
8589,
8595
],
[
9065,
9071
],
[
9150,
9156
],
[
9609,
9615
],
[
9768,
9774
],
[
10255,
10261
],
[
10322,
10328
],
[
10475,
10481
],
[
11385,
11391
],
[
12874,
12880
],
[
13427,
13433
],
[
13838,
13844
],
[
14273,
14279
],
[
14558,
14564
],
[
15008,
15014
],
[
15206,
15212
],
[
15628,
15634
],
[
15863,
15869
],
[
16310,
16316
],
[
16545,
16551
],
[
17049,
17055
],
[
17239,
17245
]
],
[
[
603,
610
],
[
4462,
4469
],
[
5021,
5028
],
[
5768,
5775
],
[
6232,
6239
],
[
6837,
6844
],
[
7402,
7409
],
[
9690,
9697
],
[
10393,
10400
],
[
15096,
15103
],
[
15726,
15733
],
[
16408,
16415
],
[
17147,
17154
]
],
[
[
618,
624
],
[
2034,
2040
],
[
2259,
2265
],
[
2403,
2409
],
[
2559,
2565
],
[
16690,
16696
],
[
4448,
4454
],
[
5007,
5013
],
[
5754,
5760
],
[
6218,
6224
],
[
6823,
6829
],
[
7388,
7394
],
[
9676,
9682
],
[
10379,
10385
],
[
15082,
15088
],
[
15712,
15718
],
[
16394,
16400
],
[
17133,
17139
]
],
[
[
658,
668
],
[
2097,
2107
],
[
2314,
2324
],
[
2461,
2471
],
[
2616,
2626
],
[
3184,
3194
],
[
3947,
3957
],
[
6569,
6579
],
[
7131,
7141
],
[
7765,
7775
],
[
8796,
8806
],
[
9317,
9327
],
[
9947,
9957
],
[
10675,
10685
],
[
14750,
14760
],
[
17441,
17451
]
],
[
[
706,
718
],
[
2152,
2164
],
[
2514,
2526
],
[
3328,
3340
],
[
4293,
4305
],
[
6704,
6716
],
[
7269,
7281
],
[
7964,
7976
],
[
8899,
8911
],
[
9437,
9449
],
[
10083,
10095
],
[
10828,
10840
],
[
10947,
10959
],
[
14873,
14885
],
[
17575,
17587
]
],
[
[
750,
755
],
[
2233,
2238
],
[
2377,
2382
],
[
3411,
3416
],
[
3464,
3469
],
[
4376,
4381
],
[
4920,
4925
],
[
5234,
5239
],
[
5648,
5653
],
[
5877,
5882
],
[
6339,
6344
],
[
6787,
6792
],
[
6903,
6908
],
[
7352,
7357
],
[
7468,
7473
],
[
7492,
7497
],
[
8047,
8052
],
[
8090,
8095
],
[
8500,
8505
],
[
8982,
8987
],
[
9520,
9525
],
[
9573,
9578
],
[
10166,
10171
],
[
10219,
10224
],
[
11034,
11039
],
[
11096,
11101
],
[
11149,
11154
],
[
12263,
12268
],
[
13199,
13204
],
[
13640,
13645
],
[
14212,
14217
],
[
14972,
14977
],
[
15553,
15558
],
[
15593,
15598
],
[
16232,
16237
],
[
17658,
17663
],
[
17711,
17716
]
],
[
[
779,
787
],
[
3058,
3066
],
[
14606,
14614
]
],
[
[
867,
884
],
[
4602,
4605
],
[
5330,
5333
],
[
6992,
6995
],
[
7586,
7589
],
[
9817,
9820
],
[
10524,
10527
],
[
15266,
15269
],
[
15940,
15943
],
[
16621,
16624
],
[
17288,
17291
]
],
[
[
911,
916
],
[
3107,
3112
],
[
8455,
8460
],
[
8540,
8545
],
[
11685,
11690
],
[
11732,
11737
],
[
12310,
12315
],
[
12373,
12378
],
[
13154,
13159
],
[
14000,
14005
],
[
14167,
14172
],
[
14652,
14657
],
[
15508,
15513
],
[
16187,
16192
]
],
[
[
949,
967
],
[
11759,
11777
]
],
[
[
969,
994
],
[
6167,
6192
]
],
[
[
996,
1003
],
[
2930,
2937
],
[
14506,
14513
]
],
[
[
1059,
1075
],
[
16735,
16751
]
],
[
[
2069,
2081
]
],
[
[
2294,
2298
]
],
[
[
2438,
2445
]
],
[
[
2594,
2600
]
],
[
[
2705,
2713
]
],
[
[
13001,
13015
]
]
] |
class Engine:
PANDAS = "pandas"
POSTGRES = "postgres"
PRESTO = "Presto"
SPARK = "Spark"
SQL_SERVER = "SqlServer"
known_engines = {PANDAS, POSTGRES, PRESTO, SPARK, SQL_SERVER}
| [
[
[
6,
12
]
]
] |
#!/usr/bin/env python
# Copyright Contributors to the Open Shading Language project.
# SPDX-License-Identifier: BSD-3-Clause
# https://github.com/imageworks/OpenShadingLanguage
# This shader would ordinarily issue a warning.
# With -Werror, it should be upgraded to an error.
failureok = 1 # this test is expected to have oslc errors
oslcargs = "-Werror"
# No need, the shader in this dir are always compiled
#command = oslc("test.osl")
| [
[
[
279,
288
]
],
[
[
341,
349
]
]
] |
from multiprocessing import freeze_support
from pathlib import Path
from typing import Dict
from deafwave.full_node.full_node import FullNode
from deafwave.rpc.full_node_rpc_api import FullNodeRpcApi
from deafwave.server.outbound_message import NodeType
from deafwave.server.start_service import run_service
from deafwave.util.block_tools import BlockTools, test_constants
from deafwave.util.config import load_config_cli
from deafwave.util.default_root import DEFAULT_ROOT_PATH
from deafwave.util.path import mkdir, path_from_root
from .full_node_simulator import FullNodeSimulator
# See: https://bugs.python.org/issue29288
"".encode("idna")
SERVICE_NAME = "full_node"
def service_kwargs_for_full_node_simulator(root_path: Path, config: Dict, bt: BlockTools) -> Dict:
mkdir(path_from_root(root_path, config["database_path"]).parent)
constants = bt.constants
node = FullNode(
config,
root_path=root_path,
consensus_constants=constants,
name=SERVICE_NAME,
)
peer_api = FullNodeSimulator(node, bt)
network_id = config["selected_network"]
kwargs = dict(
root_path=root_path,
node=node,
peer_api=peer_api,
node_type=NodeType.FULL_NODE,
advertised_port=config["port"],
service_name=SERVICE_NAME,
server_listen_ports=[config["port"]],
on_connect_callback=node.on_connect,
rpc_info=(FullNodeRpcApi, config["rpc_port"]),
network_id=network_id,
)
return kwargs
def main() -> None:
config = load_config_cli(DEFAULT_ROOT_PATH, "config.yaml", SERVICE_NAME)
config["database_path"] = config["simulator_database_path"]
config["peer_db_path"] = config["simulator_peer_db_path"]
config["introducer_peer"]["host"] = "127.0.0.1"
config["introducer_peer"]["port"] = 58735
config["selected_network"] = "testnet0"
config["simulation"] = True
kwargs = service_kwargs_for_full_node_simulator(
DEFAULT_ROOT_PATH,
config,
BlockTools(test_constants),
)
return run_service(**kwargs)
if __name__ == "__main__":
freeze_support()
main()
| [
[
[
28,
42
],
[
2111,
2125
]
],
[
[
63,
67
],
[
730,
734
]
],
[
[
87,
91
],
[
769,
773
],
[
744,
748
]
],
[
[
134,
142
],
[
885,
893
]
],
[
[
186,
200
],
[
1416,
1430
]
],
[
[
246,
254
],
[
1212,
1220
]
],
[
[
297,
308
],
[
2056,
2067
]
],
[
[
347,
357
],
[
754,
764
],
[
2011,
2021
]
],
[
[
359,
373
],
[
2022,
2036
]
],
[
[
407,
422
],
[
1543,
1558
]
],
[
[
462,
479
],
[
1559,
1576
],
[
1968,
1985
]
],
[
[
511,
516
],
[
779,
784
]
],
[
[
518,
532
],
[
785,
799
]
],
[
[
567,
584
],
[
1028,
1045
]
],
[
[
647,
659
],
[
992,
1004
],
[
1293,
1305
],
[
1593,
1605
]
],
[
[
680,
718
],
[
1920,
1958
]
],
[
[
1514,
1518
],
[
2132,
2136
]
]
] |
"""
Computes putative binding pockets on protein.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
__author__ = "Bharath Ramsundar"
__copyright__ = "Copyright 2017, Stanford University"
__license__ = "MIT"
import os
import tempfile
import numpy as np
from subprocess import call
from scipy.spatial import ConvexHull
from deepchem.feat.binding_pocket_features import BindingPocketFeaturizer
from deepchem.feat.fingerprints import CircularFingerprint
from deepchem.models.sklearn_models import SklearnModel
from deepchem.utils import rdkit_util
def extract_active_site(protein_file, ligand_file, cutoff=4):
"""Extracts a box for the active site."""
protein_coords = rdkit_util.load_molecule(
protein_file, add_hydrogens=False)[0]
ligand_coords = rdkit_util.load_molecule(
ligand_file, add_hydrogens=True, calc_charges=True)[0]
num_ligand_atoms = len(ligand_coords)
num_protein_atoms = len(protein_coords)
pocket_inds = []
pocket_atoms = set([])
for lig_atom_ind in range(num_ligand_atoms):
lig_atom = ligand_coords[lig_atom_ind]
for protein_atom_ind in range(num_protein_atoms):
protein_atom = protein_coords[protein_atom_ind]
if np.linalg.norm(lig_atom - protein_atom) < cutoff:
if protein_atom_ind not in pocket_atoms:
pocket_atoms = pocket_atoms.union(set([protein_atom_ind]))
# Should be an array of size (n_pocket_atoms, 3)
pocket_atoms = list(pocket_atoms)
n_pocket_atoms = len(pocket_atoms)
pocket_coords = np.zeros((n_pocket_atoms, 3))
for ind, pocket_ind in enumerate(pocket_atoms):
pocket_coords[ind] = protein_coords[pocket_ind]
x_min = int(np.floor(np.amin(pocket_coords[:, 0])))
x_max = int(np.ceil(np.amax(pocket_coords[:, 0])))
y_min = int(np.floor(np.amin(pocket_coords[:, 1])))
y_max = int(np.ceil(np.amax(pocket_coords[:, 1])))
z_min = int(np.floor(np.amin(pocket_coords[:, 2])))
z_max = int(np.ceil(np.amax(pocket_coords[:, 2])))
return (((x_min, x_max), (y_min, y_max), (z_min, z_max)), pocket_atoms,
pocket_coords)
def compute_overlap(mapping, box1, box2):
"""Computes overlap between the two boxes.
Overlap is defined as % atoms of box1 in box2. Note that
overlap is not a symmetric measurement.
"""
atom1 = set(mapping[box1])
atom2 = set(mapping[box2])
return len(atom1.intersection(atom2)) / float(len(atom1))
def get_all_boxes(coords, pad=5):
"""Get all pocket boxes for protein coords.
We pad all boxes the prescribed number of angstroms.
TODO(rbharath): It looks like this may perhaps be non-deterministic?
"""
hull = ConvexHull(coords)
boxes = []
for triangle in hull.simplices:
# coords[triangle, 0] gives the x-dimension of all triangle points
# Take transpose to make sure rows correspond to atoms.
points = np.array(
[coords[triangle, 0], coords[triangle, 1], coords[triangle, 2]]).T
# We voxelize so all grids have integral coordinates (convenience)
x_min, x_max = np.amin(points[:, 0]), np.amax(points[:, 0])
x_min, x_max = int(np.floor(x_min)) - pad, int(np.ceil(x_max)) + pad
y_min, y_max = np.amin(points[:, 1]), np.amax(points[:, 1])
y_min, y_max = int(np.floor(y_min)) - pad, int(np.ceil(y_max)) + pad
z_min, z_max = np.amin(points[:, 2]), np.amax(points[:, 2])
z_min, z_max = int(np.floor(z_min)) - pad, int(np.ceil(z_max)) + pad
boxes.append(((x_min, x_max), (y_min, y_max), (z_min, z_max)))
return boxes
def boxes_to_atoms(atom_coords, boxes):
"""Maps each box to a list of atoms in that box.
TODO(rbharath): This does a num_atoms x num_boxes computations. Is
there a reasonable heuristic we can use to speed this up?
"""
mapping = {}
for box_ind, box in enumerate(boxes):
box_atoms = []
(x_min, x_max), (y_min, y_max), (z_min, z_max) = box
print("Handing box %d/%d" % (box_ind, len(boxes)))
for atom_ind in range(len(atom_coords)):
atom = atom_coords[atom_ind]
x_cont = x_min <= atom[0] and atom[0] <= x_max
y_cont = y_min <= atom[1] and atom[1] <= y_max
z_cont = z_min <= atom[2] and atom[2] <= z_max
if x_cont and y_cont and z_cont:
box_atoms.append(atom_ind)
mapping[box] = box_atoms
return mapping
def merge_boxes(box1, box2):
"""Merges two boxes."""
(x_min1, x_max1), (y_min1, y_max1), (z_min1, z_max1) = box1
(x_min2, x_max2), (y_min2, y_max2), (z_min2, z_max2) = box2
x_min = min(x_min1, x_min2)
y_min = min(y_min1, y_min2)
z_min = min(z_min1, z_min2)
x_max = max(x_max1, x_max2)
y_max = max(y_max1, y_max2)
z_max = max(z_max1, z_max2)
return ((x_min, x_max), (y_min, y_max), (z_min, z_max))
def merge_overlapping_boxes(mapping, boxes, threshold=.8):
"""Merge boxes which have an overlap greater than threshold.
TODO(rbharath): This merge code is terribly inelegant. It's also quadratic
in number of boxes. It feels like there ought to be an elegant divide and
conquer approach here. Figure out later...
"""
num_boxes = len(boxes)
outputs = []
for i in range(num_boxes):
box = boxes[0]
new_boxes = []
new_mapping = {}
# If overlap of box with previously generated output boxes, return
contained = False
for output_box in outputs:
# Carry forward mappings
new_mapping[output_box] = mapping[output_box]
if compute_overlap(mapping, box, output_box) == 1:
contained = True
if contained:
continue
# We know that box has at least one atom not in outputs
unique_box = True
for merge_box in boxes[1:]:
overlap = compute_overlap(mapping, box, merge_box)
if overlap < threshold:
new_boxes.append(merge_box)
new_mapping[merge_box] = mapping[merge_box]
else:
# Current box has been merged into box further down list.
# No need to output current box
unique_box = False
merged = merge_boxes(box, merge_box)
new_boxes.append(merged)
new_mapping[merged] = list(
set(mapping[box]).union(set(mapping[merge_box])))
if unique_box:
outputs.append(box)
new_mapping[box] = mapping[box]
boxes = new_boxes
mapping = new_mapping
return outputs, mapping
class BindingPocketFinder(object):
"""Abstract superclass for binding pocket detectors"""
def find_pockets(self, protein_file, ligand_file):
"""Finds potential binding pockets in proteins."""
raise NotImplementedError
class ConvexHullPocketFinder(BindingPocketFinder):
"""Implementation that uses convex hull of protein to find pockets.
Based on https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4112621/pdf/1472-6807-14-18.pdf
"""
def __init__(self, pad=5):
self.pad = pad
def find_all_pockets(self, protein_file):
"""Find list of binding pockets on protein."""
# protein_coords is (N, 3) tensor
coords = rdkit_util.load_molecule(protein_file)[0]
return get_all_boxes(coords, self.pad)
def find_pockets(self, protein_file, ligand_file):
"""Find list of suitable binding pockets on protein."""
protein_coords = rdkit_util.load_molecule(
protein_file, add_hydrogens=False, calc_charges=False)[0]
ligand_coords = rdkit_util.load_molecule(
ligand_file, add_hydrogens=False, calc_charges=False)[0]
boxes = get_all_boxes(protein_coords, self.pad)
mapping = boxes_to_atoms(protein_coords, boxes)
pockets, pocket_atoms_map = merge_overlapping_boxes(mapping, boxes)
pocket_coords = []
for pocket in pockets:
atoms = pocket_atoms_map[pocket]
coords = np.zeros((len(atoms), 3))
for ind, atom in enumerate(atoms):
coords[ind] = protein_coords[atom]
pocket_coords.append(coords)
return pockets, pocket_atoms_map, pocket_coords
class RFConvexHullPocketFinder(BindingPocketFinder):
"""Uses pre-trained RF model + ConvexHulPocketFinder to select pockets."""
def __init__(self, pad=5):
self.pad = pad
self.convex_finder = ConvexHullPocketFinder(pad)
# Load binding pocket model
self.base_dir = tempfile.mkdtemp()
print("About to download trained model.")
# TODO(rbharath): Shift refined to full once trained.
call((
"wget -nv -c http://deepchem.io.s3-website-us-west-1.amazonaws.com/trained_models/pocket_random_refined_RF.tar.gz"
).split())
call(("tar -zxvf pocket_random_refined_RF.tar.gz").split())
call(("mv pocket_random_refined_RF %s" % (self.base_dir)).split())
self.model_dir = os.path.join(self.base_dir, "pocket_random_refined_RF")
# Fit model on dataset
self.model = SklearnModel(model_dir=self.model_dir)
self.model.reload()
# Create featurizers
self.pocket_featurizer = BindingPocketFeaturizer()
self.ligand_featurizer = CircularFingerprint(size=1024)
def find_pockets(self, protein_file, ligand_file):
"""Compute features for a given complex
    TODO(rbharath): This has a lot of code overlap with
compute_binding_pocket_features in
examples/binding_pockets/binding_pocket_datasets.py. Find way to refactor
to avoid code duplication.
"""
# if not ligand_file.endswith(".sdf"):
# raise ValueError("Only .sdf ligand files can be featurized.")
# ligand_basename = os.path.basename(ligand_file).split(".")[0]
# ligand_mol2 = os.path.join(
# self.base_dir, ligand_basename + ".mol2")
#
# # Write mol2 file for ligand
# obConversion = ob.OBConversion()
# conv_out = obConversion.SetInAndOutFormats(str("sdf"), str("mol2"))
# ob_mol = ob.OBMol()
# obConversion.ReadFile(ob_mol, str(ligand_file))
# obConversion.WriteFile(ob_mol, str(ligand_mol2))
#
# # Featurize ligand
# mol = Chem.MolFromMol2File(str(ligand_mol2), removeHs=False)
# if mol is None:
# return None, None
# # Default for CircularFingerprint
# n_ligand_features = 1024
# ligand_features = self.ligand_featurizer.featurize([mol])
#
# # Featurize pocket
# pockets, pocket_atoms_map, pocket_coords = self.convex_finder.find_pockets(
# protein_file, ligand_file)
# n_pockets = len(pockets)
# n_pocket_features = BindingPocketFeaturizer.n_features
#
# features = np.zeros((n_pockets, n_pocket_features+n_ligand_features))
# pocket_features = self.pocket_featurizer.featurize(
# protein_file, pockets, pocket_atoms_map, pocket_coords)
# # Note broadcast operation
# features[:, :n_pocket_features] = pocket_features
# features[:, n_pocket_features:] = ligand_features
# dataset = NumpyDataset(X=features)
# pocket_preds = self.model.predict(dataset)
# pocket_pred_proba = np.squeeze(self.model.predict_proba(dataset))
#
# # Find pockets which are active
# active_pockets = []
# active_pocket_atoms_map = {}
# active_pocket_coords = []
# for pocket_ind in range(len(pockets)):
# #################################################### DEBUG
# # TODO(rbharath): For now, using a weak cutoff. Fix later.
# #if pocket_preds[pocket_ind] == 1:
# if pocket_pred_proba[pocket_ind][1] > .15:
# #################################################### DEBUG
# pocket = pockets[pocket_ind]
# active_pockets.append(pocket)
# active_pocket_atoms_map[pocket] = pocket_atoms_map[pocket]
# active_pocket_coords.append(pocket_coords[pocket_ind])
# return active_pockets, active_pocket_atoms_map, active_pocket_coords
# # TODO(LESWING)
raise ValueError("Karl Implement")
| [
[
[
77,
85
]
],
[
[
109,
123
]
],
[
[
147,
163
]
],
[
[
165,
175
]
],
[
[
198,
211
]
],
[
[
252,
263
]
],
[
[
280,
282
],
[
8508,
8510
]
],
[
[
290,
298
],
[
8080,
8088
]
],
[
[
306,
317
],
[
1245,
1247
],
[
1555,
1557
],
[
1702,
1704
],
[
1711,
1713
],
[
1756,
1758
],
[
1764,
1766
],
[
1809,
1811
],
[
1818,
1820
],
[
1863,
1865
],
[
1871,
1873
],
[
1916,
1918
],
[
1925,
1927
],
[
1970,
1972
],
[
1978,
1980
],
[
2858,
2860
],
[
3033,
3035
],
[
3056,
3058
],
[
3101,
3103
],
[
3129,
3131
],
[
3170,
3172
],
[
3193,
3195
],
[
3238,
3240
],
[
3266,
3268
],
[
3307,
3309
],
[
3330,
3332
],
[
3375,
3377
],
[
3403,
3405
],
[
7596,
7598
]
],
[
[
341,
345
],
[
8207,
8211
],
[
8356,
8360
],
[
8420,
8424
]
],
[
[
372,
382
],
[
2648,
2658
]
],
[
[
433,
456
],
[
8727,
8750
]
],
[
[
496,
515
],
[
8782,
8801
]
],
[
[
559,
571
],
[
8609,
8621
]
],
[
[
599,
609
],
[
737,
747
],
[
825,
835
],
[
6893,
6903
],
[
7113,
7123
],
[
7225,
7235
]
],
[
[
616,
635
]
],
[
[
2114,
2129
],
[
5373,
5388
],
[
5609,
5624
]
],
[
[
2429,
2442
],
[
6946,
6959
],
[
7328,
7341
]
],
[
[
3513,
3527
],
[
7382,
7396
]
],
[
[
4287,
4298
],
[
5930,
5941
]
],
[
[
4706,
4729
],
[
7452,
7475
]
],
[
[
6254,
6273
],
[
6510,
6529
],
[
7826,
7845
]
],
[
[
6487,
6509
],
[
7999,
8021
]
],
[
[
7801,
7825
]
]
] |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for checkpointable object SavedModel loading."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
from tensorflow.python.eager import backprop
from tensorflow.python.eager import def_function
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_spec
from tensorflow.python.lib.io import file_io
from tensorflow.python.ops import variables
from tensorflow.python.saved_model import load
from tensorflow.python.saved_model import save
from tensorflow.python.training.checkpointable import tracking
class LoadTest(test.TestCase):
def cycle(self, obj):
path = tempfile.mkdtemp(prefix=self.get_temp_dir())
save.save(obj, path, signatures={})
return load.load(path)
def test_structure_import(self):
root = tracking.Checkpointable()
root.dep_one = tracking.Checkpointable()
root.dep_two = tracking.Checkpointable()
root.dep_two.dep = tracking.Checkpointable()
root.dep_three = root.dep_two.dep
imported = self.cycle(root)
self.assertIs(imported.dep_three, imported.dep_two.dep)
self.assertIsNot(imported.dep_one, imported.dep_two)
def test_variables(self):
root = tracking.Checkpointable()
root.v1 = variables.Variable(1., trainable=True)
root.v2 = variables.Variable(2., trainable=False)
imported = self.cycle(root)
self.assertEquals(imported.v1.numpy(), 1.0)
self.assertTrue(imported.v1.trainable)
self.assertEquals(imported.v2.numpy(), 2.0)
self.assertFalse(imported.v2.trainable)
def test_capture_variables(self):
root = tracking.Checkpointable()
root.weights = variables.Variable(2.)
root.f = def_function.function(
lambda x: root.weights * x,
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
imported = self.cycle(root)
self.assertEqual(4., imported.f(constant_op.constant(2.)).numpy())
imported.weights.assign(4.0)
self.assertEqual(8., imported.f(constant_op.constant(2.)).numpy())
def _make_asset(self, contents):
filename = tempfile.mktemp(prefix=self.get_temp_dir())
with open(filename, "w") as f:
f.write(contents)
return filename
def test_assets(self):
file1 = self._make_asset("contents 1")
file2 = self._make_asset("contents 2")
root = tracking.Checkpointable()
root.asset1 = tracking.TrackableAsset(file1)
root.asset2 = tracking.TrackableAsset(file2)
save_dir = os.path.join(self.get_temp_dir(), "save_dir")
save.save(root, save_dir, signatures={})
file_io.delete_file(file1)
file_io.delete_file(file2)
load_dir = os.path.join(self.get_temp_dir(), "load_dir")
file_io.rename(save_dir, load_dir)
imported = load.load(load_dir)
with open(imported.asset1.asset_path.numpy(), "r") as f:
self.assertEquals("contents 1", f.read())
with open(imported.asset2.asset_path.numpy(), "r") as f:
self.assertEquals("contents 2", f.read())
def test_capture_assets(self):
root = tracking.Checkpointable()
root.vocab = tracking.TrackableAsset(self._make_asset("contents"))
root.f = def_function.function(
lambda: root.vocab.asset_path,
input_signature=[])
imported = self.cycle(root)
origin_output = root.f().numpy()
imported_output = imported.f().numpy()
self.assertNotEqual(origin_output, imported_output)
with open(imported_output, "r") as f:
self.assertEquals("contents", f.read())
def test_dedup_assets(self):
vocab = self._make_asset("contents")
root = tracking.Checkpointable()
root.asset1 = tracking.TrackableAsset(vocab)
root.asset2 = tracking.TrackableAsset(vocab)
imported = self.cycle(root)
self.assertEqual(imported.asset1.asset_path.numpy(),
imported.asset2.asset_path.numpy())
def test_implicit_input_signature(self):
@def_function.function
def func(x):
return 2 * x
root = tracking.Checkpointable()
root.f = func
# Add two traces.
root.f(constant_op.constant(1.))
root.f(constant_op.constant(1))
imported = self.cycle(root)
self.assertEqual(4., imported.f(constant_op.constant(2.)).numpy())
self.assertEqual(14, imported.f(constant_op.constant(7)).numpy())
def test_explicit_input_signature(self):
@def_function.function(
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
def func(x):
return 2 * x
root = tracking.Checkpointable()
root.f = func
imported = self.cycle(root)
self.assertEqual(4., imported.f(constant_op.constant(2.0)).numpy())
def test_nested_functions(self):
f = def_function.function(
lambda x: x*2.0,
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
g = def_function.function(
lambda x: f(x) + 1.0,
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
root = tracking.Checkpointable()
root.g = g
imported = self.cycle(root)
imported.g(constant_op.constant([1.0]))
def test_function_with_default_bool_input(self):
def func(x, training=False):
if training:
return 2 * x
else:
return 7
root = tracking.Checkpointable()
root.f = def_function.function(func)
self.assertEqual(20, root.f(constant_op.constant(10), True).numpy())
self.assertEqual(7, root.f(constant_op.constant(1)).numpy())
self.assertEqual(2, root.f(constant_op.constant(1), True).numpy())
imported = self.cycle(root)
self.assertEqual(4, imported.f(constant_op.constant(2), True).numpy())
self.assertEqual(7, imported.f(constant_op.constant(2)).numpy())
def test_positional_arguments(self):
def func(x, training=False, abc=7.1, defg=7.7):
del abc
if training:
return 2 * x
if defg == 7:
return 6
else:
return 7
root = tracking.Checkpointable()
root.f = def_function.function(func)
self.assertEqual(20, root.f(constant_op.constant(10), True).numpy())
self.assertEqual(7, root.f(constant_op.constant(1)).numpy())
self.assertEqual(2, root.f(constant_op.constant(1), True).numpy())
self.assertEqual(6, root.f(constant_op.constant(1), defg=7.0).numpy())
imported = self.cycle(root)
self.assertEqual(4, imported.f(constant_op.constant(2), True).numpy())
self.assertEqual(7, imported.f(constant_op.constant(2)).numpy())
self.assertEqual(6, imported.f(constant_op.constant(1), defg=7.0).numpy())
def test_member_function(self):
class CheckpointableWithMember(tracking.Checkpointable):
def __init__(self):
super(CheckpointableWithMember, self).__init__()
self._some_value = 20
@def_function.function
def f(self, x, training=False):
if training:
return 2 * x
else:
return 7 + self._some_value
root = CheckpointableWithMember()
self.assertEqual(20, root.f(constant_op.constant(10), True).numpy())
self.assertEqual(27, root.f(constant_op.constant(1)).numpy())
self.assertEqual(2, root.f(constant_op.constant(1), True).numpy())
imported = self.cycle(root)
self.assertEqual(4, imported.f(constant_op.constant(2), True).numpy())
self.assertEqual(27, imported.f(constant_op.constant(2)).numpy())
def test_side_effect_listing(self):
class M(tracking.Checkpointable):
def __init__(self):
super(M, self).__init__()
self.var = None
@def_function.function(
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
def f(self, x):
if self.var is None:
self.var = variables.Variable(2.)
return x * self.var
m = M()
self.cycle(m)
self.assertEquals(4.0, m.f(constant_op.constant(2.0)).numpy())
def test_basic_backprop(self):
weight = variables.Variable(1., trainable=True)
bias = variables.Variable(0., trainable=True)
g = def_function.function(
lambda x: x*weight + bias,
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
root = tracking.Checkpointable()
root.weight = weight
root.bias = bias
root.g = g
imported = self.cycle(root)
with backprop.GradientTape(watch_accessed_variables=True) as t:
x = constant_op.constant([3.5])
loss = imported.g(x)
grad = t.gradient(loss, [imported.weight, imported.bias])
self.assertAllClose(grad, [3.5, 1.0])
def test_callable(self):
class M1(tracking.Checkpointable):
@def_function.function(
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
def __call__(self, x):
return x
root = tracking.Checkpointable()
root.m1 = M1()
root.m2 = tracking.Checkpointable()
root.m2.__call__ = def_function.function(
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])(
lambda x: x*3.0)
imported = self.cycle(root)
x = constant_op.constant(1.0)
self.assertTrue(callable(imported.m1))
self.assertAllEqual(root.m1(x), imported.m1(x))
# Note: `root.m2` was not callable since `__call__` attribute was set
# into the instance and not on the class. But after a serialization cycle
# that starts to work.
self.assertTrue(callable(imported.m2))
self.assertAllEqual(root.m2.__call__(x), imported.m2(x))
# Verify that user objects without `__call__` attribute are not callable.
self.assertFalse(callable(imported))
def test_chain_callable(self):
func = def_function.function(
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])(
lambda x: x*3.0)
root = tracking.Checkpointable()
root.__call__ = tracking.Checkpointable()
root.__call__.__call__ = tracking.Checkpointable()
root.__call__.__call__.__call__ = func
imported = self.cycle(root)
self.assertTrue(callable(imported))
x = constant_op.constant(1.0)
self.assertAllEqual(imported(x).numpy(), 3.0)
if __name__ == "__main__":
test.main()
| [
[
[
771,
786
]
],
[
[
810,
818
]
],
[
[
842,
856
]
],
[
[
865,
867
],
[
3290,
3292
],
[
3459,
3461
]
],
[
[
875,
883
],
[
1486,
1494
],
[
2903,
2911
]
],
[
[
921,
929
],
[
9014,
9022
]
],
[
[
966,
978
],
[
2514,
2526
],
[
3953,
3965
],
[
4702,
4714
],
[
5136,
5148
],
[
5472,
5484
],
[
5600,
5612
],
[
6060,
6072
],
[
6739,
6751
],
[
7527,
7539
],
[
8282,
8294
],
[
8744,
8756
],
[
9321,
9333
],
[
9584,
9596
],
[
10320,
10332
]
],
[
[
1015,
1019
],
[
1434,
1438
],
[
10814,
10818
]
],
[
[
1060,
1071
],
[
2713,
2724
],
[
2817,
2828
],
[
4850,
4861
],
[
4887,
4898
],
[
4982,
4993
],
[
5053,
5064
],
[
5392,
5403
],
[
5825,
5836
],
[
6121,
6132
],
[
6193,
6204
],
[
6258,
6269
],
[
6367,
6378
],
[
6442,
6453
],
[
6800,
6811
],
[
6872,
6883
],
[
6937,
6948
],
[
7008,
7019
],
[
7121,
7132
],
[
7196,
7207
],
[
7265,
7276
],
[
7755,
7766
],
[
7828,
7839
],
[
7893,
7904
],
[
8002,
8013
],
[
8078,
8089
],
[
8564,
8575
],
[
9083,
9094
],
[
9749,
9760
],
[
10707,
10718
]
],
[
[
1112,
1118
],
[
2627,
2633
],
[
5213,
5219
],
[
5574,
5580
],
[
5707,
5713
],
[
8361,
8367
],
[
8856,
8862
],
[
9400,
9406
],
[
9661,
9667
],
[
10397,
10403
]
],
[
[
1159,
1170
],
[
2598,
2609
],
[
5184,
5195
],
[
5545,
5556
],
[
5678,
5689
],
[
8332,
8343
],
[
8827,
8838
],
[
9371,
9382
],
[
9632,
9643
],
[
10368,
10379
]
],
[
[
1208,
1215
],
[
3386,
3393
],
[
3417,
3424
],
[
3509,
3516
]
],
[
[
1250,
1259
],
[
2077,
2086
],
[
2130,
2139
],
[
2478,
2487
],
[
8647,
8656
],
[
8697,
8706
],
[
8451,
8460
]
],
[
[
1302,
1306
],
[
1582,
1586
],
[
3560,
3564
]
],
[
[
1349,
1353
],
[
1535,
1539
],
[
3340,
3344
]
],
[
[
1408,
1416
],
[
1645,
1653
],
[
1690,
1698
],
[
1735,
1743
],
[
1784,
1792
],
[
2037,
2045
],
[
2433,
2441
],
[
3150,
3158
],
[
3194,
3202
],
[
3243,
3251
],
[
3843,
3851
],
[
3886,
3894
],
[
4383,
4391
],
[
4427,
4435
],
[
4476,
4484
],
[
4772,
4780
],
[
5279,
5287
],
[
5737,
5745
],
[
6021,
6029
],
[
6700,
6708
],
[
7379,
7387
],
[
8163,
8171
],
[
8886,
8894
],
[
9287,
9295
],
[
9476,
9484
],
[
9535,
9543
],
[
10456,
10464
],
[
10502,
10510
],
[
10557,
10565
]
],
[
[
1425,
1433
]
]
] |
from selenium import webdriver
import time
from bs4 import BeautifulSoup
import requests
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
driver = webdriver.PhantomJS()
driver.set_window_size(1120,550)
driver.get("http:10.10.2.1")
#time.sleep(5)
res=0
try:
e1=WebDriverWait(driver,10).until(
EC.presence_of_element_located((By.NAME,"username"))
)
e2=WebDriverWait(driver,10).until(
EC.presence_of_element_located((By.NAME,"passwd"))
)
e3=WebDriverWait(driver,10).until(
EC.presence_of_element_located((By.LINK_TEXT,"Login"))
)
driver.find_element_by_name('username').send_keys('Your_username')
driver.find_element_by_name('passwd').send_keys('Your_password')
driver.find_element_by_name('rememberme').click()
#res=e1&e2&e3
res=BeautifulSoup(driver.page_source)
if "Connected(Default Internet)" not in res.text :
driver.find_element_by_css_selector('.field2 input').click()
#print(res)
res=1
#time.sleep(5)
#a=driver.find_element_by_css_selector('.field2'(1))
#print(a)
#print(driver.current_url)
finally:
if res :
print("Successful!!")
time.sleep(5)
else:
print("Failed :(")
driver.quit()
| [
[
[
21,
30
],
[
263,
272
]
],
[
[
38,
42
],
[
1257,
1261
]
],
[
[
59,
72
],
[
900,
913
]
],
[
[
80,
88
]
],
[
[
130,
132
],
[
449,
451
],
[
551,
553
],
[
651,
653
]
],
[
[
175,
188
],
[
381,
394
],
[
483,
496
],
[
583,
596
]
],
[
[
228,
253
],
[
417,
419
],
[
519,
521
],
[
619,
621
]
],
[
[
254,
260
],
[
285,
291
],
[
319,
325
],
[
395,
401
],
[
497,
503
],
[
597,
603
],
[
684,
690
],
[
755,
761
],
[
824,
830
],
[
914,
920
],
[
997,
1003
],
[
1308,
1314
]
],
[
[
363,
366
],
[
1213,
1216
]
],
[
[
378,
380
]
],
[
[
480,
482
]
],
[
[
580,
582
]
],
[
[
896,
899
],
[
978,
981
]
],
[
[
1078,
1081
],
[
1213,
1216
]
]
] |
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tokenization classes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import re
import unicodedata
import six
import tensorflow as tf
def validate_case_matches_checkpoint(do_lower_case, init_checkpoint):
"""Checks whether the casing config is consistent with the checkpoint name."""
# The casing has to be passed in by the user and there is no explicit check
# as to whether it matches the checkpoint. The casing information probably
# should have been stored in the bert_config.json file, but it's not, so
# we have to heuristically detect it to validate.
if not init_checkpoint:
return
m = re.match("^.*?([A-Za-z0-9_-]+)/bert_model.ckpt", init_checkpoint)
if m is None:
return
model_name = m.group(1)
lower_models = [
"uncased_L-24_H-1024_A-16", "uncased_L-12_H-768_A-12",
"multilingual_L-12_H-768_A-12", "chinese_L-12_H-768_A-12"
]
cased_models = [
"cased_L-12_H-768_A-12", "cased_L-24_H-1024_A-16",
"multi_cased_L-12_H-768_A-12"
]
is_bad_config = False
if model_name in lower_models and not do_lower_case:
is_bad_config = True
actual_flag = "False"
case_name = "lowercased"
opposite_flag = "True"
if model_name in cased_models and do_lower_case:
is_bad_config = True
actual_flag = "True"
case_name = "cased"
opposite_flag = "False"
if is_bad_config:
raise ValueError(
"You passed in `--do_lower_case=%s` with `--init_checkpoint=%s`. "
"However, `%s` seems to be a %s model, so you "
"should pass in `--do_lower_case=%s` so that the fine-tuning matches "
"how the model was pre-training. If this error is wrong, please "
"just comment out this check." % (actual_flag, init_checkpoint,
model_name, case_name, opposite_flag))
def convert_to_unicode(text):
"""Converts `text` to Unicode (if it's not already), assuming utf-8 input."""
if six.PY3:
if isinstance(text, str):
return text
elif isinstance(text, bytes):
return text.decode("utf-8", "ignore")
else:
raise ValueError("Unsupported string type: %s" % (type(text)))
elif six.PY2:
if isinstance(text, str):
return text.decode("utf-8", "ignore")
elif isinstance(text, unicode):
return text
else:
raise ValueError("Unsupported string type: %s" % (type(text)))
else:
raise ValueError("Not running on Python2 or Python 3?")
def printable_text(text):
"""Returns text encoded in a way suitable for print or `tf.logging`."""
# These functions want `str` for both Python2 and Python3, but in one case
# it's a Unicode string and in the other it's a byte string.
if six.PY3:
if isinstance(text, str):
return text
elif isinstance(text, bytes):
return text.decode("utf-8", "ignore")
else:
raise ValueError("Unsupported string type: %s" % (type(text)))
elif six.PY2:
if isinstance(text, str):
return text
elif isinstance(text, unicode):
return text.encode("utf-8")
else:
raise ValueError("Unsupported string type: %s" % (type(text)))
else:
raise ValueError("Not running on Python2 or Python 3?")
def load_vocab(vocab_file):
"""Loads a vocabulary file into a dictionary."""
vocab = collections.OrderedDict()
index = 0
with tf.gfile.GFile(vocab_file, "r") as reader:
while True:
token = convert_to_unicode(reader.readline())
if not token:
break
token = token.strip()
vocab[token] = index
index += 1
return vocab
def convert_by_vocab(vocab, items):
"""Converts a sequence of [tokens|ids] using the vocab."""
output = []
#print("items:",items) #['[CLS]', '日', '##期', ',', '但', '被', '##告', '金', '##东', '##福', '载', '##明', '[MASK]', 'U', '##N', '##K', ']', '保', '##证', '本', '##月', '1', '##4', '[MASK]', '到', '##位', ',', '2', '##0', '##1', '##5', '年', '6', '[MASK]', '1', '##1', '日', '[', 'U', '##N', '##K', ']', ',', '原', '##告', '[MASK]', '认', '##可', '于', '2', '##0', '##1', '##5', '[MASK]', '6', '月', '[MASK]', '[MASK]', '日', '##向', '被', '##告', '主', '##张', '权', '##利', '。', '而', '[MASK]', '[MASK]', '自', '[MASK]', '[MASK]', '[MASK]', '[MASK]', '年', '6', '月', '1', '##1', '日', '[SEP]', '原', '##告', '于', '2', '##0', '##1', '##6', '[MASK]', '6', '[MASK]', '2', '##4', '日', '起', '##诉', ',', '主', '##张', '保', '##证', '责', '##任', ',', '已', '超', '##过', '保', '##证', '期', '##限', '[MASK]', '保', '##证', '人', '依', '##法', '不', '##再', '承', '##担', '保', '##证', '[MASK]', '[MASK]', '[MASK]', '[SEP]']
for i,item in enumerate(items):
#print(i,"item:",item) # ##期
output.append(vocab[item])
return output
def convert_tokens_to_ids(vocab, tokens):
return convert_by_vocab(vocab, tokens)
def convert_ids_to_tokens(inv_vocab, ids):
return convert_by_vocab(inv_vocab, ids)
def whitespace_tokenize(text):
"""Runs basic whitespace cleaning and splitting on a piece of text."""
text = text.strip()
if not text:
return []
tokens = text.split()
return tokens
class FullTokenizer(object):
"""Runs end-to-end tokenziation."""
def __init__(self, vocab_file, do_lower_case=True):
self.vocab = load_vocab(vocab_file)
self.inv_vocab = {v: k for k, v in self.vocab.items()}
self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case)
self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab)
def tokenize(self, text):
split_tokens = []
for token in self.basic_tokenizer.tokenize(text):
for sub_token in self.wordpiece_tokenizer.tokenize(token):
split_tokens.append(sub_token)
return split_tokens
def convert_tokens_to_ids(self, tokens):
return convert_by_vocab(self.vocab, tokens)
def convert_ids_to_tokens(self, ids):
return convert_by_vocab(self.inv_vocab, ids)
class BasicTokenizer(object):
"""Runs basic tokenization (punctuation splitting, lower casing, etc.)."""
def __init__(self, do_lower_case=True):
"""Constructs a BasicTokenizer.
Args:
do_lower_case: Whether to lower case the input.
"""
self.do_lower_case = do_lower_case
def tokenize(self, text):
"""Tokenizes a piece of text."""
text = convert_to_unicode(text)
text = self._clean_text(text)
# This was added on November 1st, 2018 for the multilingual and Chinese
# models. This is also applied to the English models now, but it doesn't
# matter since the English models were not trained on any Chinese data
# and generally don't have any Chinese data in them (there are Chinese
# characters in the vocabulary because Wikipedia does have some Chinese
# words in the English Wikipedia.).
text = self._tokenize_chinese_chars(text)
orig_tokens = whitespace_tokenize(text)
split_tokens = []
for token in orig_tokens:
if self.do_lower_case:
token = token.lower()
token = self._run_strip_accents(token)
split_tokens.extend(self._run_split_on_punc(token))
output_tokens = whitespace_tokenize(" ".join(split_tokens))
return output_tokens
def _run_strip_accents(self, text):
"""Strips accents from a piece of text."""
text = unicodedata.normalize("NFD", text)
output = []
for char in text:
cat = unicodedata.category(char)
if cat == "Mn":
continue
output.append(char)
return "".join(output)
def _run_split_on_punc(self, text):
"""Splits punctuation on a piece of text."""
chars = list(text)
i = 0
start_new_word = True
output = []
while i < len(chars):
char = chars[i]
if _is_punctuation(char):
output.append([char])
start_new_word = True
else:
if start_new_word:
output.append([])
start_new_word = False
output[-1].append(char)
i += 1
return ["".join(x) for x in output]
def _tokenize_chinese_chars(self, text):
"""Adds whitespace around any CJK character."""
output = []
for char in text:
cp = ord(char)
if self._is_chinese_char(cp):
output.append(" ")
output.append(char)
output.append(" ")
else:
output.append(char)
return "".join(output)
def _is_chinese_char(self, cp):
"""Checks whether CP is the codepoint of a CJK character."""
# This defines a "chinese character" as anything in the CJK Unicode block:
# https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block)
#
# Note that the CJK Unicode block is NOT all Japanese and Korean characters,
# despite its name. The modern Korean Hangul alphabet is a different block,
# as is Japanese Hiragana and Katakana. Those alphabets are used to write
# space-separated words, so they are not treated specially and handled
# like the all of the other languages.
if ((cp >= 0x4E00 and cp <= 0x9FFF) or #
(cp >= 0x3400 and cp <= 0x4DBF) or #
(cp >= 0x20000 and cp <= 0x2A6DF) or #
(cp >= 0x2A700 and cp <= 0x2B73F) or #
(cp >= 0x2B740 and cp <= 0x2B81F) or #
(cp >= 0x2B820 and cp <= 0x2CEAF) or
(cp >= 0xF900 and cp <= 0xFAFF) or #
(cp >= 0x2F800 and cp <= 0x2FA1F)): #
return True
return False
def _clean_text(self, text):
"""Performs invalid character removal and whitespace cleanup on text."""
output = []
for char in text:
cp = ord(char)
if cp == 0 or cp == 0xfffd or _is_control(char):
continue
if _is_whitespace(char):
output.append(" ")
else:
output.append(char)
return "".join(output)
class WordpieceTokenizer(object):
"""Runs WordPiece tokenziation."""
def __init__(self, vocab, unk_token="[UNK]", max_input_chars_per_word=200):
self.vocab = vocab
self.unk_token = unk_token
self.max_input_chars_per_word = max_input_chars_per_word
def tokenize(self, text):
"""Tokenizes a piece of text into its word pieces.
This uses a greedy longest-match-first algorithm to perform tokenization
using the given vocabulary.
For example:
input = "unaffable"
output = ["un", "##aff", "##able"]
Args:
text: A single token or whitespace separated tokens. This should have
        already been passed through `BasicTokenizer`.
Returns:
A list of wordpiece tokens.
"""
text = convert_to_unicode(text)
output_tokens = []
for token in whitespace_tokenize(text):
chars = list(token)
if len(chars) > self.max_input_chars_per_word:
output_tokens.append(self.unk_token)
continue
is_bad = False
start = 0
sub_tokens = []
while start < len(chars):
end = len(chars)
cur_substr = None
while start < end:
substr = "".join(chars[start:end])
if start > 0:
substr = "##" + substr
if substr in self.vocab:
cur_substr = substr
break
end -= 1
if cur_substr is None:
is_bad = True
break
sub_tokens.append(cur_substr)
start = end
if is_bad:
output_tokens.append(self.unk_token)
else:
output_tokens.extend(sub_tokens)
return output_tokens
def _is_whitespace(char):
"""Checks whether `chars` is a whitespace character."""
  # \t, \n, and \r are technically control characters but we treat them
# as whitespace since they are generally considered as such.
if char == " " or char == "\t" or char == "\n" or char == "\r":
return True
cat = unicodedata.category(char)
if cat == "Zs":
return True
return False
def _is_control(char):
"""Checks whether `chars` is a control character."""
# These are technically control characters but we count them as whitespace
# characters.
if char == "\t" or char == "\n" or char == "\r":
return False
cat = unicodedata.category(char)
if cat in ("Cc", "Cf"):
return True
return False
def _is_punctuation(char):
"""Checks whether `chars` is a punctuation character."""
cp = ord(char)
# We treat all non-letter/number ASCII as punctuation.
# Characters such as "^", "$", and "`" are not in the Unicode
# Punctuation class but we treat them as punctuation anyways, for
# consistency.
if ((cp >= 33 and cp <= 47) or (cp >= 58 and cp <= 64) or
(cp >= 91 and cp <= 96) or (cp >= 123 and cp <= 126)):
return True
cat = unicodedata.category(char)
if cat.startswith("P"):
return True
return False
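# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): writes a toy vocab to
# a temporary file and runs FullTokenizer end to end. The vocab entries are
# invented for illustration; a real checkpoint ships its own vocab.txt. This
# assumes a TF1-style environment where tf.gfile exists, as load_vocab does.
if __name__ == "__main__":
  import tempfile
  toy_vocab = ["[UNK]", "un", "##aff", "##able"]
  with tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False) as f:
    f.write("\n".join(toy_vocab) + "\n")
    vocab_path = f.name
  tokenizer = FullTokenizer(vocab_file=vocab_path, do_lower_case=True)
  toy_tokens = tokenizer.tokenize("unaffable")
  print(toy_tokens)                                   # ['un', '##aff', '##able']
  print(tokenizer.convert_tokens_to_ids(toy_tokens))  # [1, 2, 3]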
| [
[
[
667,
682
]
],
[
[
706,
714
]
],
[
[
738,
752
]
],
[
[
761,
772
],
[
3981,
3992
]
],
[
[
780,
782
],
[
1318,
1320
]
],
[
[
790,
801
],
[
7845,
7856
],
[
7930,
7941
],
[
12217,
12228
],
[
12542,
12553
],
[
13082,
13093
]
],
[
[
809,
812
],
[
2642,
2645
],
[
2863,
2866
],
[
3395,
3398
],
[
3616,
3619
]
],
[
[
820,
836
],
[
4026,
4028
]
],
[
[
843,
875
]
],
[
[
2531,
2549
],
[
4099,
4117
],
[
6872,
6890
],
[
11024,
11042
]
],
[
[
3153,
3167
]
],
[
[
3896,
3906
],
[
5858,
5868
]
],
[
[
4264,
4280
],
[
5401,
5417
],
[
5487,
5503
],
[
6368,
6384
],
[
6457,
6473
]
],
[
[
5354,
5375
]
],
[
[
5439,
5460
]
],
[
[
5526,
5545
],
[
7416,
7435
],
[
7679,
7698
],
[
11090,
11109
]
],
[
[
5725,
5738
]
],
[
[
6503,
6517
],
[
5967,
5981
]
],
[
[
10278,
10296
],
[
6042,
6060
]
],
[
[
11912,
11926
],
[
10154,
10168
]
],
[
[
12299,
12310
],
[
10109,
10120
]
],
[
[
12632,
12647
],
[
8269,
8284
]
]
] |
from __future__ import annotations
from decimal import Decimal
def inverse_of_matrix(matrix: list[list[float]]) -> list[list[float]]:
"""
A matrix multiplied with its inverse gives the identity matrix.
This function finds the inverse of a 2x2 matrix.
If the determinant of a matrix is 0, its inverse does not exist.
Sources for fixing inaccurate float arithmetic:
https://stackoverflow.com/questions/6563058/how-do-i-use-accurate-float-arithmetic-in-python
https://docs.python.org/3/library/decimal.html
>>> inverse_of_matrix([[2, 5], [2, 0]])
[[0.0, 0.5], [0.2, -0.2]]
>>> inverse_of_matrix([[2.5, 5], [1, 2]])
Traceback (most recent call last):
...
ValueError: This matrix has no inverse.
>>> inverse_of_matrix([[12, -16], [-9, 0]])
[[0.0, -0.1111111111111111], [-0.0625, -0.08333333333333333]]
>>> inverse_of_matrix([[12, 3], [16, 8]])
[[0.16666666666666666, -0.0625], [-0.3333333333333333, 0.25]]
>>> inverse_of_matrix([[10, 5], [3, 2.5]])
[[0.25, -0.5], [-0.3, 1.0]]
"""
    D = Decimal  # An abbreviation used for conciseness
# Calculate the determinant of the matrix
determinant = D(matrix[0][0]) * D(matrix[1][1]) - D(matrix[1][0]) * D(matrix[0][1])
if determinant == 0:
raise ValueError("This matrix has no inverse.")
# Creates a copy of the matrix with swapped positions of the elements
swapped_matrix = [[0.0, 0.0], [0.0, 0.0]]
swapped_matrix[0][0], swapped_matrix[1][1] = matrix[1][1], matrix[0][0]
swapped_matrix[1][0], swapped_matrix[0][1] = -matrix[1][0], -matrix[0][1]
# Calculate the inverse of the matrix
return [[float(D(n) / determinant) or 0.0 for n in row] for row in swapped_matrix]
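# A minimal verification sketch (not part of the original module): multiplying
# a matrix by the inverse computed above should give the 2x2 identity matrix,
# up to floating point rounding. The sample matrix is arbitrary.
if __name__ == "__main__":
    sample = [[2.0, 5.0], [2.0, 0.0]]
    inverse = inverse_of_matrix(sample)
    product = [
        [sum(sample[i][k] * inverse[k][j] for k in range(2)) for j in range(2)]
        for i in range(2)
    ]
    print(inverse)  # [[0.0, 0.5], [0.2, -0.2]]
    print(product)  # approximately [[1.0, 0.0], [0.0, 1.0]]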
| [
[
[
23,
34
]
],
[
[
56,
63
],
[
1070,
1077
]
],
[
[
70,
87
]
]
] |
# Copyright (c) Ye Liu. All rights reserved.
import nncore
def test_bind_getter():
@nncore.bind_getter('name', 'depth')
class Backbone:
_name = 'ResNet'
_depth = 50
backbone = Backbone()
assert backbone.name == 'ResNet'
assert backbone.depth == 50
| [
[
[
53,
59
],
[
92,
98
]
],
[
[
66,
82
]
]
] |
# Generated by Django 2.1.4 on 2019-03-24 19:19
import datetime
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0009_alter_user_last_name_max_length'),
]
operations = [
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('name', models.CharField(blank=True, max_length=16, null=True, verbose_name='用户名')),
('gender', models.CharField(choices=[('male', '男'), ('female', '女')], default='female', max_length=6, verbose_name='性别')),
('mobile', models.CharField(blank=True, max_length=11, null=True, verbose_name='电话')),
('email', models.CharField(blank=True, max_length=100, null=True, verbose_name='邮箱')),
('top_img', models.ImageField(max_length=200, null=True, upload_to='user/')),
('create_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='创建时间')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': '用户',
'verbose_name_plural': '用户',
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
]
| [
[
[
56,
64
],
[
2591,
2599
]
],
[
[
72,
98
]
],
[
[
106,
136
]
],
[
[
159,
169
],
[
225,
235
],
[
382,
392
]
],
[
[
171,
177
],
[
482,
488
],
[
603,
609
],
[
694,
700
],
[
800,
806
],
[
993,
999
],
[
1325,
1331
],
[
1428,
1434
],
[
1530,
1536
],
[
1697,
1703
],
[
1903,
1909
],
[
2014,
2020
],
[
2118,
2124
],
[
2257,
2263
],
[
2359,
2365
],
[
2464,
2470
],
[
2562,
2568
],
[
2664,
2670
],
[
2941,
2947
]
],
[
[
185,
206
],
[
1207,
1213
],
[
1932,
1938
],
[
3320,
3326
]
],
[
[
215,
224
]
]
] |
import os
import click
from yoda.ssh.shell import Shell
from yoda.ssh.config import importHost
# Context obj
class Cmd():
def __init__(self):
self.verbose = False
self.shell = None
self.host = None
pass_cmd = click.make_pass_decorator(Cmd, ensure=True)
class CmdsLoader(click.MultiCommand):
_cmdFolder = os.path.abspath(os.path.join(os.path.dirname(__file__), 'cmds'))
def list_commands(self, ctx):
rv = []
for filename in os.listdir(self._cmdFolder):
if filename.endswith('.py'):
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
try:
cmdFullName = 'yoda.cmds.' + name
mod = __import__(cmdFullName, None, None, ['cmd'])
except ImportError:
return
return mod.cmd
@click.command(cls=CmdsLoader)
@click.option('-v', '--verbose', count=True, help="Explain what is being done")
@click.option('-i', '--interactive', count=True, help="Show all the output from the established remote shell session")
@click.option('-f', '--force', is_flag=True, help="Force the execution of the commands if one fails")
@click.option('-h', '--host', default="myserver", help="The name of the connection defined in ~/.ssh/config file")
@click.pass_context
def yoda(ctx, verbose, interactive, force, host):
shell = Shell(host)
hostConfig = importHost(host)
shell.setConfig(hostConfig[0]['options'])
shell.connect()
if (verbose):
click.echo("Connected to host %s" % host)
# Setting up cmd context
shell.interactive = bool(interactive)
shell.force = force
cmd = Cmd()
cmd.shell = shell
cmd.host = host
cmd.verbose = verbose
ctx.obj = cmd
# TODO
# vim /etc/ssh/sshd_config
# edit Port 17000
# edit PermitRootLogin without-password
# service ssh reload
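# Hedged illustration (not part of this file): CmdsLoader imports each module
# found in the ./cmds folder and exposes its `cmd` attribute as a subcommand,
# so a minimal command file such as cmds/hello.py (hypothetical name) could be:
#
#   import click
#
#   @click.command()
#   @click.pass_obj
#   def cmd(cmd_obj):
#       """Print the host this yoda session is connected to."""
#       click.echo("connected to %s" % cmd_obj.host)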
| [
[
[
7,
9
],
[
323,
325
],
[
339,
341
],
[
352,
354
],
[
453,
455
]
],
[
[
17,
22
],
[
225,
230
],
[
287,
292
],
[
779,
784
],
[
810,
815
],
[
890,
895
],
[
1009,
1014
],
[
1111,
1116
],
[
1226,
1231
],
[
1431,
1436
]
],
[
[
50,
55
],
[
1305,
1310
]
],
[
[
84,
94
],
[
1332,
1342
]
],
[
[
116,
119
],
[
251,
254
],
[
1571,
1574
]
],
[
[
214,
222
]
],
[
[
276,
286
],
[
797,
807
]
],
[
[
1249,
1253
]
]
] |
"""This module supports puzzles that place fixed shape regions into the grid."""
from collections import defaultdict
import sys
from typing import Dict, List
from z3 import ArithRef, Int, IntVal, Or, Solver, PbEq
from .fastz3 import fast_and, fast_eq, fast_ne
from .geometry import Lattice, Point, Vector
from .quadtree import ExpressionQuadTree
# Key types for use with the ExpressionQuadTree when adding shape instance
# constraints.
HAS_INSTANCE_ID, NOT_HAS_INSTANCE_ID, HAS_SHAPE_TYPE = range(3)
def canonicalize_shape(shape: List[Vector]) -> List[Vector]:
"""Returns a new shape that's canonicalized.
A canonicalized shape is in sorted order and its first offset is Vector(0, 0).
This helps with deduplication, since equivalent shapes will be canonicalized
identically.
# Arguments
shape (List[Vector]): A list of offsets defining a shape.
# Returns
(List[Vector]): A list of offsets defining the canonicalized version
of the shape, i.e., in sorted order and with first offset equal
to Vector(0, 0).
"""
shape = sorted(shape)
first_negated = shape[0].negate()
return [v.translate(first_negated) for v in shape]
class ShapeConstrainer:
"""Creates constraints for placing fixed shape regions into the grid.
# Arguments
lattice (Lattice): The structure of the grid.
shapes (List[List[Vector]]): A list of region shape definitions.
Each region shape definition should be a list of offsets.
The same region shape definition may be included multiple times to
indicate the number of times that shape may appear (if allow_copies
is false).
solver (z3.Solver, None): A #Solver object. If None, a #Solver will be
constructed.
complete (bool): If true, every cell must be part of a shape region. Defaults
to false.
allow_rotations (bool): If true, allow rotations of the shapes to be placed
in the grid. Defaults to false.
allow_reflections (bool): If true, allow reflections of the shapes to be
placed in the grid. Defaults to false.
allow_copies (bool): If true, allow any number of copies of the shapes to be
placed in the grid. Defaults to false.
"""
_instance_index = 0
def __init__( # pylint: disable=R0913
self,
lattice: Lattice,
shapes: List[List[Vector]],
solver: Solver = None,
complete: bool = False,
allow_rotations: bool = False,
allow_reflections: bool = False,
allow_copies: bool = False
):
ShapeConstrainer._instance_index += 1
if solver:
self.__solver = solver
else:
self.__solver = Solver()
self.__lattice = lattice
self.__complete = complete
self.__allow_copies = allow_copies
self.__shapes = shapes
self.__make_variants(allow_rotations, allow_reflections)
self.__create_grids()
self.__add_constraints()
def __make_variants(self, allow_rotations, allow_reflections):
fs = self.__lattice.transformation_functions(
allow_rotations, allow_reflections)
self.__variants = [
[
list(shape_tuple)
for shape_tuple in {
tuple(canonicalize_shape([f(v) for v in s]))
for f in fs
}
]
for s in self.__shapes
]
def __create_grids(self):
"""Create the grids used to model shape region constraints."""
self.__shape_type_grid: Dict[Point, ArithRef] = {}
for p in self.__lattice.points:
v = Int(f"scst-{ShapeConstrainer._instance_index}-{p.y}-{p.x}")
if self.__complete:
self.__solver.add(v >= 0)
else:
self.__solver.add(v >= -1)
self.__solver.add(v < len(self.__shapes))
self.__shape_type_grid[p] = v
self.__shape_instance_grid: Dict[Point, ArithRef] = {}
for p in self.__lattice.points:
v = Int(f"scsi-{ShapeConstrainer._instance_index}-{p.y}-{p.x}")
if self.__complete:
self.__solver.add(v >= 0)
else:
self.__solver.add(v >= -1)
self.__solver.add(v < len(self.__lattice.points))
self.__shape_instance_grid[p] = v
def __add_constraints(self):
self.__add_grid_agreement_constraints()
self.__add_shape_instance_constraints()
if not self.__allow_copies:
for shape_index, shape in enumerate(self.__shapes):
self.__add_single_copy_constraints(shape_index, shape)
def __add_grid_agreement_constraints(self):
for p in self.__shape_type_grid:
self.__solver.add(
Or(
fast_and(
self.__shape_type_grid[p] == -1,
self.__shape_instance_grid[p] == -1
),
fast_and(
self.__shape_type_grid[p] != -1,
self.__shape_instance_grid[p] != -1
)
)
)
def __add_shape_instance_constraints(self): # pylint: disable=R0914
int_vals = {}
for i in range(max(len(self.__lattice.points), len(self.__variants))):
int_vals[i] = IntVal(i)
quadtree = ExpressionQuadTree(self.__lattice.points)
for instance_id in [self.__lattice.point_to_index(p) for p in self.__lattice.points]:
quadtree.add_expr(
(HAS_INSTANCE_ID, instance_id),
lambda p, i=instance_id: fast_eq(self.__shape_instance_grid[p], int_vals[i]))
quadtree.add_expr(
(NOT_HAS_INSTANCE_ID, instance_id),
lambda p, i=instance_id: fast_ne(self.__shape_instance_grid[p], int_vals[i]))
for shape_index in range(len(self.__variants)):
quadtree.add_expr(
(HAS_SHAPE_TYPE, shape_index),
lambda p, i=shape_index: fast_eq(self.__shape_type_grid[p], int_vals[i]))
root_options = defaultdict(list)
for shape_index, variants in enumerate(self.__variants): # pylint: disable=R1702
for variant in variants:
for root_point in self.__lattice.points:
instance_id = self.__lattice.point_to_index(root_point)
offset_points = set()
for offset_vector in variant:
point = root_point.translate(offset_vector)
if point not in self.__shape_instance_grid:
offset_points = None
break
offset_points.add(point)
if offset_points:
and_terms = []
for p in offset_points:
and_terms.append(quadtree.get_point_expr((HAS_INSTANCE_ID, instance_id), p))
and_terms.append(quadtree.get_point_expr((HAS_SHAPE_TYPE, shape_index), p))
and_terms.append(quadtree.get_other_points_expr(
(NOT_HAS_INSTANCE_ID, instance_id), offset_points))
root_options[root_point].append(fast_and(*and_terms))
for p in self.__lattice.points:
instance_id = self.__lattice.point_to_index(p)
not_has_instance_id_expr = quadtree.get_other_points_expr(
(NOT_HAS_INSTANCE_ID, instance_id), [])
or_terms = root_options[p]
if or_terms:
or_terms.append(not_has_instance_id_expr)
self.__solver.add(Or(*or_terms))
else:
self.__solver.add(not_has_instance_id_expr)
def __add_single_copy_constraints(self, shape_index, shape):
sum_terms = []
for p in self.__shape_type_grid:
sum_terms.append((self.__shape_type_grid[p] == shape_index, 1))
self.__solver.add(PbEq(sum_terms, len(shape)))
@property
def solver(self) -> Solver:
"""(z3.Solver): The #Solver associated with this #ShapeConstrainer."""
return self.__solver
@property
def shape_type_grid(self) -> Dict[Point, ArithRef]:
"""(Dict[Point, ArithRef]): A dictionary of z3 constants of shape types.
Each cell contains the index of the shape type placed in that cell (as
indexed by the shapes list passed in to the #ShapeConstrainer constructor),
or -1 if no shape is placed within that cell.
"""
return self.__shape_type_grid
@property
def shape_instance_grid(self) -> Dict[Point, ArithRef]:
"""(Dict[Point, ArithRef]): z3 constants of shape instance IDs.
Each cell contains a number shared among all cells containing the same
instance of the shape, or -1 if no shape is placed within that cell.
"""
return self.__shape_instance_grid
def print_shape_types(self):
"""Prints the shape type assigned to each cell.
Should be called only after the solver has been checked.
"""
model = self.__solver.model()
min_y = min(p.y for p in self.__shape_type_grid)
min_x = min(p.x for p in self.__shape_type_grid)
max_y = max(p.y for p in self.__shape_type_grid)
max_x = max(p.x for p in self.__shape_type_grid)
for y in range(min_y, max_y + 1):
for x in range(min_x, max_x + 1):
p = Point(y, x)
shape_index = -1
if p in self.__shape_type_grid:
v = self.__shape_type_grid[p]
shape_index = model.eval(v).as_long()
if shape_index >= 0:
sys.stdout.write(f"{shape_index:3}")
else:
sys.stdout.write(" ")
print()
def print_shape_instances(self):
"""Prints the shape instance ID assigned to each cell.
Should be called only after the solver has been checked.
"""
model = self.__solver.model()
min_y = min(p.y for p in self.__shape_instance_grid)
min_x = min(p.x for p in self.__shape_instance_grid)
max_y = max(p.y for p in self.__shape_instance_grid)
max_x = max(p.x for p in self.__shape_instance_grid)
for y in range(min_y, max_y + 1):
for x in range(min_x, max_x + 1):
p = Point(y, x)
shape_instance = -1
if p in self.__shape_instance_grid:
v = self.__shape_instance_grid[p]
shape_instance = model.eval(v).as_long()
if shape_instance >= 0:
sys.stdout.write(f"{shape_instance:3}")
else:
sys.stdout.write(" ")
print()
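# A minimal standalone sketch of the grid-agreement pattern encoded in
# __add_grid_agreement_constraints above, written against plain z3. The
# variable names are illustrative only; the original module builds the same
# two-way constraint once per lattice point.
from z3 import And, Int, Or, Solver, sat

_solver = Solver()
_shape_type = Int("demo_shape_type")          # -1 means "no shape here"
_shape_instance = Int("demo_shape_instance")  # -1 means "no shape here"

# A cell is either empty in both grids or occupied in both grids.
_solver.add(
    Or(
        And(_shape_type == -1, _shape_instance == -1),
        And(_shape_type != -1, _shape_instance != -1),
    )
)
_solver.add(_shape_type == 2)  # pretend shape type 2 occupies this cell
assert _solver.check() == sat
print(_solver.model())  # _shape_instance is forced to a value != -1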
| [
[
[
106,
117
],
[
5660,
5671
]
],
[
[
125,
128
],
[
8868,
8871
],
[
8929,
8932
],
[
9705,
9708
],
[
9769,
9772
]
],
[
[
148,
152
],
[
3381,
3385
],
[
3738,
3742
],
[
7492,
7496
],
[
7888,
7892
]
],
[
[
154,
158
],
[
553,
557
],
[
536,
540
],
[
2285,
2289
],
[
2290,
2294
]
],
[
[
174,
182
],
[
3393,
3401
],
[
3750,
3758
],
[
7504,
7512
],
[
7900,
7908
]
],
[
[
184,
187
],
[
3454,
3457
],
[
3811,
3814
]
],
[
[
189,
195
],
[
4966,
4972
]
],
[
[
197,
199
],
[
4466,
4468
],
[
6985,
6987
]
],
[
[
201,
207
],
[
2319,
2325
],
[
2596,
2602
],
[
7340,
7346
]
],
[
[
209,
213
],
[
7276,
7280
]
],
[
[
235,
243
],
[
4484,
4492
],
[
4630,
4638
],
[
6631,
6639
]
],
[
[
245,
252
],
[
5226,
5233
],
[
5591,
5598
]
],
[
[
254,
261
],
[
5385,
5392
]
],
[
[
284,
291
],
[
2262,
2269
]
],
[
[
293,
298
],
[
3386,
3391
],
[
3743,
3748
],
[
7497,
7502
],
[
7893,
7898
],
[
8664,
8669
],
[
9484,
9489
]
],
[
[
300,
306
],
[
558,
564
],
[
541,
547
],
[
2295,
2301
]
],
[
[
329,
347
],
[
4992,
5010
]
],
[
[
440,
455
],
[
5160,
5175
],
[
6333,
6348
]
],
[
[
457,
476
],
[
5315,
5334
],
[
6536,
6555
],
[
6818,
6837
]
],
[
[
478,
492
],
[
5526,
5540
],
[
6424,
6438
]
],
[
[
510,
528
],
[
3129,
3147
]
],
[
[
1169,
1185
],
[
2482,
2498
],
[
3466,
3482
],
[
3823,
3839
]
]
] |
def Print():
    print('you may want to install beautifulsoup4, not beautfulsoup4')
| [
[
[
4,
9
]
]
] |
from os.path import abspath, dirname, join
from os import environ, path
_cwd = dirname(abspath(__file__))
basedir = path.abspath(path.dirname(__file__))
class BaseConfiguration(object):
DEBUG = True
SECRET_KEY = 'Test'
CORS = ["http://localhost:4200", "http://127.0.0.1:5000"] | [
[
[
20,
27
],
[
91,
98
]
],
[
[
29,
36
],
[
83,
90
]
],
[
[
38,
42
]
],
[
[
59,
66
]
],
[
[
68,
72
],
[
121,
125
],
[
134,
138
]
],
[
[
76,
80
]
],
[
[
111,
118
]
],
[
[
167,
184
]
]
] |
# File to hold all objects to ease construction of JSON payload.
# Non-PEP8 property declaration used as JSON serializing is 1:1, eg. "clientId = clientId", not "client_id = clientId"
class Client(object):
clientId = ""
clientVersion = "0.0.1"
def __init__(self, client_id, client_version="0.0.1"):
self.clientId = client_id
self.clientVersion = client_version
class ThreatEntry(object):
def __init__(self, url):
self.url = url
class ThreatInfo(object):
def __init__(self, threatTypes, platformTypes, threatEntryTypes, threatEntries):
self.threatTypes = threatTypes
self.platformTypes = platformTypes
self.threatEntryTypes = threatEntryTypes
self.threatEntries = threatEntries
class Request(object):
def __init__(self, client, threatInfo):
self.client = client
self.threatInfo = threatInfo
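# A usage sketch, assuming it runs in the same module as the classes above.
# Because the attribute names mirror the JSON keys 1:1, the whole payload can
# be serialized with json.dumps(..., default=vars); the client id and URL
# below are placeholders.
import json

if __name__ == "__main__":
    demo_payload = Request(
        Client("example-client-id"),
        ThreatInfo(
            threatTypes=["MALWARE"],
            platformTypes=["ANY_PLATFORM"],
            threatEntryTypes=["URL"],
            threatEntries=[ThreatEntry("http://example.com/")],
        ),
    )
    print(json.dumps(demo_payload, default=vars, indent=2))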
| [
[
[
192,
198
]
],
[
[
400,
411
]
],
[
[
481,
491
]
],
[
[
768,
775
]
]
] |
from .utils.defaults import default_depot_path, default_install_dir, default_symlink_dir
from .utils.filters import f_major_version, f_minor_version
from .utils import query_yes_no
from .utils import current_architecture, current_system, current_libc
from .utils import latest_version
from .utils import DmgMounter, TarMounter
from .utils import Version
from .utils import verify_upstream
from .utils import color, show_verbose
from .download import download_package
import os
import re
import shutil
import subprocess
def is_installed(version, check_symlinks=True):
"""
check if the required version is already installed.
"""
check_list = ["julia"]
if version == "latest":
check_list.append("julia-latest")
if version != "latest" and check_symlinks:
check_list.extend([f"julia-{f_major_version(version)}",
f"julia-{f_minor_version(version)}"])
for path in check_list:
if Version(get_exec_version(shutil.which(path))) != Version(version):
return False
return True
def get_exec_version(path):
ver_cmd = [path, "--version"]
try:
# outputs: "julia version 1.4.0-rc1"
version = subprocess.check_output(ver_cmd).decode("utf-8")
version = version.lower().split("version")[-1].strip()
except: # nopep8
# in case it fails in any situation: invalid target or command(.cmd)
# issue: https://github.com/abelsiqueira/jill/issues/25
version = "0.0.1"
return version
def check_installer(installer_path, ext):
filename = os.path.basename(installer_path)
if not filename.endswith(ext):
msg = f"The installer {filename} should be {ext} file"
raise ValueError(msg)
def last_julia_version(version=None):
# version should follow semantic version syntax
def sort_key(ver):
return float(ver.lstrip("v"))
version = float(f_minor_version(version)) if version else 999.999
proj_versions = os.listdir(os.path.join(default_depot_path(),
"environments"))
proj_versions = [x for x in proj_versions if re.fullmatch(r"v\d+\.\d+", x)]
proj_versions = sorted(filter(lambda ver: sort_key(ver) < version,
proj_versions),
key=sort_key)
if proj_versions:
return proj_versions[-1]
else:
return None
def make_symlinks(src_bin, symlink_dir, version):
if not os.path.isfile(src_bin):
raise(ValueError(f"{src_bin} doesn't exist."))
system = current_system()
if symlink_dir not in map(os.path.normpath, os.environ["PATH"].split(os.pathsep)):
print(f"add {symlink_dir} to PATH")
if system == "winnt":
            # FIXME: this also copies system PATH to user PATH
subprocess.run(["powershell.exe",
"setx", "PATH", f'"$env:PATH;{symlink_dir}"'])
else:
msg = "~/.bashrc will be modified"
msg += "\nif you're not using BASH, then you'll need manually"
msg += f" add {symlink_dir} to your PATH"
print(msg)
rc_file = os.path.expanduser("~/.bashrc")
with open(rc_file, "a") as file:
file.writelines("\n# added by jill\n")
file.writelines(f"export PATH={symlink_dir}:$PATH\n")
print(f"you need to restart your current shell to update PATH")
os.makedirs(symlink_dir, exist_ok=True)
new_ver = Version(get_exec_version(src_bin))
if version == "latest":
# issue 11: don't symlink to julia
link_list = ["julia-latest"]
elif len(Version(version).build) > 0:
link_list = ["julia-dev"]
elif len(new_ver.prerelease) > 0:
# issue #76
# - it is usually unwanted to symlink unstable release to `julia` and `julia-x`
# - still symlink to `julia-x.y` because otherwise there is no way to access the unstable
# release.
link_list = [f"julia-{f_minor_version(version)}"]
else:
link_list = [f"julia-{f(version)}" for f in (f_major_version,
f_minor_version)]
link_list.append("julia")
for linkname in link_list:
linkpath = os.path.join(symlink_dir, linkname)
if current_system() == "winnt":
linkpath += ".cmd"
# symlink rules:
# 1. always symlink latest
# 2. only make new symlink if it's a newer version
# - julia --> latest stable X.Y.Z
# - julia-1 --> latest stable 1.Y.Z
# - julia-1.0 --> latest stable 1.0.Z
# - don't make symlink to patch level
if os.path.exists(linkpath) or os.path.islink(linkpath):
if (os.path.islink(linkpath) and
os.readlink(linkpath) == src_bin):
# happens when installing a new patch version
continue
old_ver = Version(get_exec_version(linkpath))
if show_verbose():
print(f"old symlink version: {old_ver}")
print(f"new installation version: {new_ver}")
if old_ver > new_ver:
# if two versions are the same, use the new one
continue
msg = f"{color.YELLOW}remove old symlink"
msg += f" {linkname}{color.END}"
print(msg)
os.remove(linkpath)
print(f"{color.GREEN}make new symlink {linkpath}{color.END}")
if current_system() == "winnt":
with open(linkpath, 'w') as f:
# create a cmd file to mimic how we do symlinks in linux
f.writelines(['@echo off\n', f'"{src_bin}" %*'])
else:
os.symlink(src_bin, linkpath)
def copy_root_project(version):
mver = f_minor_version(version)
old_ver = last_julia_version(version)
if old_ver is None:
print(
f"Can't find available old root project for version {version}")
return None
env_path = os.path.join(default_depot_path(), "environments")
src_path = os.path.join(env_path, old_ver)
dest_path = os.path.join(env_path, f"v{mver}")
if src_path == dest_path:
return None
if os.path.exists(dest_path):
bak_path = os.path.join(env_path, f"v{mver}.bak")
if os.path.exists(bak_path):
print(f"{color.YELLOW}delete old backup {bak_path}{color.END}")
shutil.rmtree(bak_path)
shutil.move(dest_path, bak_path)
print(f"{color.YELLOW}move {dest_path} to {bak_path}{color.END}")
shutil.copytree(src_path, dest_path)
def install_julia_tarball(package_path,
install_dir,
symlink_dir,
version,
upgrade):
check_installer(package_path, ".tar.gz")
if re.match("(.*)\+(\w+)$", version):
# We want a different folder name for commit builds so that we can have
# julia-dev and julia-latest points to two different julia versions
suffix = 'dev'
else:
suffix = f_minor_version(version)
with TarMounter(package_path) as root:
src_path = root
dest_path = os.path.join(install_dir, f"julia-{suffix}")
if os.path.exists(dest_path):
shutil.rmtree(dest_path)
msg = f"{color.YELLOW}remove previous Julia installation:"
msg += f" {dest_path}{color.END}"
print(msg)
# preserve lib symlinks, otherwise it might cause troubles
# see also: https://github.com/JuliaGPU/CUDA.jl/issues/249
shutil.copytree(src_path, dest_path, symlinks=True)
print(f"{color.GREEN}install Julia to {dest_path}{color.END}")
os.chmod(dest_path, 0o755) # issue 12
bin_path = os.path.join(dest_path, "bin", "julia")
if current_system() == 'winnt':
bin_path += '.exe'
make_symlinks(bin_path, symlink_dir, version)
if upgrade:
copy_root_project(version)
return True
def install_julia_dmg(package_path,
install_dir,
symlink_dir,
version,
upgrade):
check_installer(package_path, ".dmg")
with DmgMounter(package_path) as root:
# mounted image contents:
# ['.VolumeIcon.icns', 'Applications', 'Julia-1.3.app']
appname = next(filter(lambda x: x.lower().startswith('julia'),
os.listdir(root)))
src_path = os.path.join(root, appname)
dest_path = os.path.join(install_dir, appname)
if os.path.exists(dest_path):
msg = f"{color.YELLOW}remove previous Julia installation:"
msg += f" {dest_path}{color.END}"
print(msg)
shutil.rmtree(dest_path)
# preserve lib symlinks, otherwise it might cause troubles
# see also: https://github.com/JuliaGPU/CUDA.jl/issues/249
shutil.copytree(src_path, dest_path, symlinks=True)
print(f"{color.GREEN}install Julia to {dest_path}{color.END}")
bin_path = os.path.join(dest_path,
"Contents", "Resources", "julia", "bin", "julia")
make_symlinks(bin_path, symlink_dir, version)
if upgrade:
copy_root_project(version)
return True
def install_julia_exe(package_path,
install_dir,
symlink_dir,
version,
upgrade):
check_installer(package_path, ".exe")
dest_path = os.path.join(install_dir,
f"julia-{f_minor_version(version)}")
if os.path.exists(dest_path):
shutil.rmtree(dest_path, ignore_errors=True)
msg = f"{color.YELLOW}remove previous Julia installation:"
msg += f" {dest_path}{color.END}"
print(msg)
# build system changes for windows after 1.4
# https://github.com/JuliaLang/julia/blob/release-1.4/NEWS.md#build-system-changes
if Version(version).next_patch() < Version("1.4.0"):
# it's always false if version == "latest"
subprocess.check_output([f'{package_path}',
'/S', f'/D={dest_path}'])
else:
subprocess.check_output([f'{package_path}',
'/VERYSILENT',
f'/DIR={dest_path}'])
print(f"{color.GREEN}install Julia to {dest_path}{color.END}")
bin_path = os.path.join(dest_path, "bin", "julia.exe")
make_symlinks(bin_path, symlink_dir, version)
if upgrade:
copy_root_project(version)
return True
def hello_msg():
msg = f"{color.BOLD}JILL - Julia Installer 4 Linux"
msg += f" (MacOS, Windows and FreeBSD) -- Light{color.END}\n"
print(msg)
def install_julia(version=None, *,
install_dir=None,
symlink_dir=None,
upgrade=False,
upstream=None,
unstable=False,
keep_downloads=False,
confirm=False,
reinstall=False):
"""
Install the Julia programming language for your current system
`jill install [version]` would satisfy most of your use cases, try it first
and then read description of other arguments. `version` is optional, valid
version syntax for it is:
* `stable`: latest stable Julia release. This is the _default_ option.
* `1`: latest `1.y.z` Julia release.
* `1.0`: latest `1.0.z` Julia release.
* `1.4.0-rc1`: as it is.
* `latest`/`nightly`: the nightly builds from source code.
For Linux/FreeBSD systems, if you run this command with `root` account,
then it will install Julia system-widely.
To download from a private mirror, please check `jill download -h`.
Arguments:
version:
The Julia version you want to install.
upstream:
manually choose a download upstream. For example, set it to "Official"
if you want to download from JuliaComputing's s3 buckets.
upgrade:
add `--upgrade` flag also copy the root environment from an older
Julia version.
unstable:
add `--unstable` flag to allow installation of unstable releases for auto version
query. For example, `jill install --unstable` might give you unstable installation
like `1.7.0-beta1`. Note that if you explicitly pass the unstable version, e.g.,
`jill install 1.7.0-beta1`, it will still work.
keep_downloads:
add `--keep_downloads` flag to not remove downloaded releases.
confirm: add `--confirm` flag to skip interactive prompt.
reinstall:
jill will skip the installation if the required Julia version already exists,
add `--reinstall` flag to force the reinstallation.
install_dir:
where you want julia packages installed.
symlink_dir:
where you want symlinks(e.g., `julia`, `julia-1`) placed.
"""
install_dir = install_dir if install_dir else default_install_dir()
install_dir = os.path.abspath(install_dir)
symlink_dir = symlink_dir if symlink_dir else default_symlink_dir()
symlink_dir = os.path.normpath(os.path.abspath(symlink_dir))
system, arch = current_system(), current_architecture()
version = str(version) if (version or str(version) == "0") else ''
version = "latest" if version == "nightly" else version
version = "" if version == "stable" else version
upstream = upstream if upstream else os.environ.get("JILL_UPSTREAM", None)
if system == "linux" and current_libc() == "musl":
# currently Julia tags musl as a system, e.g.,
# https://julialang-s3.julialang.org/bin/musl/x64/1.5/julia-1.5.1-musl-x86_64.tar.gz
system = "musl"
hello_msg()
if system == "winnt":
install_dir = install_dir.replace("\\\\", "\\").strip('\'"')
if not confirm:
version_str = version if version else "latest stable release"
question = "jill will:\n"
question += f" 1) install Julia {version_str} for {system}-{arch}"
question += f" into {color.UNDERLINE}{install_dir}{color.END}\n"
question += f" 2) make symlinks in {color.UNDERLINE}{symlink_dir}{color.END}\n"
question += f"You may need to manually add {color.UNDERLINE}{symlink_dir}{color.END} to PATH\n"
question += "Continue installation?"
to_continue = query_yes_no(question)
if not to_continue:
return False
if upstream:
verify_upstream(upstream)
wrong_args = False
try:
version = latest_version(
version, system, arch, upstream=upstream, stable_only=not unstable)
except ValueError:
# hide the nested error stack :P
wrong_args = True
if wrong_args:
msg = f"wrong version(>= 0.6.0) argument: {version}\n"
msg += f"Example: `jill install 1`"
raise(ValueError(msg))
if not reinstall and is_installed(version):
print(f"julia {version} already installed.")
return True
overwrite = True if version == "latest" else False
print(f"{color.BOLD}----- Download Julia -----{color.END}")
package_path = download_package(version, system, arch,
upstream=upstream,
overwrite=overwrite)
if not package_path:
return False
if package_path.endswith(".dmg"):
installer = install_julia_dmg
elif package_path.endswith(".tar.gz"):
installer = install_julia_tarball
elif package_path.endswith(".exe"):
installer = install_julia_exe
else:
print(f"{color.RED}Unsupported file format for {package_path}{color.END}.")
print(f"{color.BOLD}----- Install Julia -----{color.END}")
installer(package_path, install_dir, symlink_dir, version, upgrade)
if not keep_downloads:
print(f"{color.BOLD}----- Post Installation -----{color.END}")
print("remove downloaded files...")
print(f"remove {package_path}")
os.remove(package_path)
gpg_signature_file = package_path + ".asc"
if os.path.exists(gpg_signature_file):
print(f"remove {gpg_signature_file}")
os.remove(gpg_signature_file)
print(f"{color.GREEN}Done!{color.END}")
| [
[
[
28,
46
],
[
2011,
2029
],
[
6071,
6089
]
],
[
[
48,
67
],
[
13062,
13081
]
],
[
[
69,
88
],
[
13181,
13200
]
],
[
[
116,
131
],
[
826,
841
],
[
4115,
4130
]
],
[
[
133,
148
],
[
890,
905
],
[
1917,
1932
],
[
4024,
4039
],
[
4185,
4200
],
[
5840,
5855
],
[
7141,
7156
],
[
9644,
9659
]
],
[
[
168,
180
],
[
14464,
14476
]
],
[
[
200,
220
],
[
13305,
13325
]
],
[
[
222,
236
],
[
2578,
2592
],
[
4335,
4349
],
[
5529,
5543
],
[
7884,
7898
],
[
13287,
13301
]
],
[
[
238,
250
],
[
13621,
13633
]
],
[
[
270,
284
],
[
14642,
14656
]
],
[
[
304,
314
],
[
8280,
8290
]
],
[
[
316,
326
],
[
7176,
7186
]
],
[
[
346,
353
],
[
959,
966
],
[
1008,
1015
],
[
3510,
3517
],
[
3666,
3673
],
[
4984,
4991
],
[
10031,
10038
],
[
10063,
10070
]
],
[
[
373,
388
],
[
14566,
14581
]
],
[
[
408,
413
],
[
5315,
5320
],
[
5381,
5386
],
[
5465,
5470
],
[
5505,
5510
],
[
6409,
6414
],
[
6451,
6456
],
[
6558,
6563
],
[
6602,
6607
],
[
7395,
7400
],
[
7479,
7484
],
[
7725,
7730
],
[
7766,
7771
],
[
8695,
8700
],
[
8779,
8784
],
[
9062,
9067
],
[
9103,
9108
],
[
9776,
9781
],
[
9856,
9861
],
[
10421,
10426
],
[
10462,
10467
],
[
10683,
10688
],
[
10778,
10783
],
[
14160,
14165
],
[
14190,
14195
],
[
14249,
14254
],
[
14279,
14284
],
[
14345,
14350
],
[
14375,
14380
],
[
15176,
15181
],
[
15214,
15219
],
[
15711,
15716
],
[
15764,
15769
],
[
15792,
15797
],
[
15829,
15834
],
[
15959,
15964
],
[
16000,
16005
],
[
16332,
16337
],
[
16350,
16355
]
],
[
[
415,
427
],
[
5035,
5047
]
],
[
[
450,
466
],
[
15246,
15262
]
],
[
[
475,
477
],
[
1582,
1584
],
[
1987,
1989
],
[
1998,
2000
],
[
2484,
2486
],
[
2625,
2627
],
[
2643,
2645
],
[
2668,
2670
],
[
3176,
3178
],
[
3455,
3457
],
[
4288,
4290
],
[
4720,
4722
],
[
4748,
4750
],
[
4790,
4792
],
[
4839,
4841
],
[
5428,
5430
],
[
5765,
5767
],
[
6058,
6060
],
[
6124,
6126
],
[
6172,
6174
],
[
6266,
6268
],
[
6312,
6314
],
[
6362,
6364
],
[
7254,
7256
],
[
7310,
7312
],
[
7783,
7785
],
[
7837,
7839
],
[
8515,
8517
],
[
8553,
8555
],
[
8601,
8603
],
[
8647,
8649
],
[
9131,
9133
],
[
9580,
9582
],
[
9679,
9681
],
[
10490,
10492
],
[
13102,
13104
],
[
13221,
13223
],
[
13238,
13240
],
[
13553,
13555
],
[
16105,
16107
],
[
16191,
16193
],
[
16289,
16291
]
],
[
[
485,
487
],
[
2143,
2145
],
[
6900,
6902
]
],
[
[
495,
501
],
[
984,
990
],
[
6476,
6482
],
[
6508,
6514
],
[
6619,
6625
],
[
7349,
7355
],
[
7656,
7662
],
[
8826,
8832
],
[
8993,
8999
],
[
9714,
9720
]
],
[
[
509,
519
],
[
1203,
1213
],
[
2831,
2841
],
[
10140,
10150
],
[
10261,
10271
]
],
[
[
526,
538
],
[
15011,
15023
]
],
[
[
1073,
1089
],
[
967,
983
],
[
3518,
3534
],
[
4992,
5008
]
],
[
[
1529,
1544
],
[
6851,
6866
],
[
8232,
8247
],
[
9525,
9540
]
],
[
[
1749,
1767
],
[
5879,
5897
]
],
[
[
2427,
2440
],
[
7944,
7957
],
[
9237,
9250
],
[
10538,
10551
]
],
[
[
5801,
5818
],
[
8014,
8031
],
[
9307,
9324
],
[
10608,
10625
]
],
[
[
6662,
6683
],
[
15584,
15605
]
],
[
[
8063,
8080
],
[
15503,
15520
]
],
[
[
9356,
9373
],
[
15666,
15683
]
],
[
[
10657,
10666
],
[
13824,
13833
]
],
[
[
10813,
10826
]
]
] |
# Automatically generated
# pylint: disable=all
get = [{'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 1, 'DefaultCores': 1, 'DefaultThreadsPerCore': 1, 'ValidCores': [1], 'ValidThreadsPerCore': [1], 'SizeInMiB': 2048, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2, 'Ipv4AddressesPerInterface': 4, 'Ipv6AddressesPerInterface': 4, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.medium', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 1, 'DefaultCores': 1, 'DefaultThreadsPerCore': 1, 'ValidCores': [1], 'ValidThreadsPerCore': [1]}, 'MemoryInfo': {'SizeInMiB': 2048}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2, 'Ipv4AddressesPerInterface': 4, 'Ipv6AddressesPerInterface': 4, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 2, 'DefaultCores': 2, 'DefaultThreadsPerCore': 1, 'ValidCores': [2], 'ValidThreadsPerCore': [1], 'SizeInMiB': 4096, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 3, 'Ipv4AddressesPerInterface': 10, 'Ipv6AddressesPerInterface': 10, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.large', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 2, 'DefaultCores': 2, 'DefaultThreadsPerCore': 1, 'ValidCores': [2], 'ValidThreadsPerCore': [1]}, 'MemoryInfo': {'SizeInMiB': 4096}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 3, 'Ipv4AddressesPerInterface': 10, 'Ipv6AddressesPerInterface': 10, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 4, 'DefaultCores': 4, 'DefaultThreadsPerCore': 1, 'ValidCores': [4], 'ValidThreadsPerCore': [1], 'SizeInMiB': 8192, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4, 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 
'spread'], 'InstanceType': 'a1.xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 4, 'DefaultCores': 4, 'DefaultThreadsPerCore': 1, 'ValidCores': [4], 'ValidThreadsPerCore': [1]}, 'MemoryInfo': {'SizeInMiB': 8192}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4, 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 8, 'DefaultCores': 8, 'DefaultThreadsPerCore': 1, 'ValidCores': [8], 'ValidThreadsPerCore': [1], 'SizeInMiB': 16384, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4, 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.2xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 8, 'DefaultCores': 8, 'DefaultThreadsPerCore': 1, 'ValidCores': [8], 'ValidThreadsPerCore': [1]}, 'MemoryInfo': {'SizeInMiB': 16384}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4, 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 16, 'DefaultCores': 16, 'DefaultThreadsPerCore': 1, 'ValidCores': [16], 'ValidThreadsPerCore': [1], 'SizeInMiB': 32768, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.4xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 16, 'DefaultCores': 16, 'DefaultThreadsPerCore': 1, 'ValidCores': [16], 'ValidThreadsPerCore': [1]}, 'MemoryInfo': {'SizeInMiB': 32768}, 'InstanceStorageSupported': 
False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 16, 'SizeInMiB': 32768, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 16}, 'MemoryInfo': {'SizeInMiB': 32768}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}] # noqa: E501
def get_instances_list() -> list:
    '''Returns the list of EC2 instances whose InstanceType starts with "a".'''
# pylint: disable=all
return get
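# A small usage sketch (continuing the module above): filter the static list
# by default vCPU count; the threshold is arbitrary and only for illustration.
def get_instances_with_min_vcpus(min_vcpus: int) -> list:
    '''Returns instance type names whose default vCPU count is >= min_vcpus.'''
    return [
        item['InstanceType']
        for item in get_instances_list()
        if item['VCpuInfo']['DefaultVCpus'] >= min_vcpus
    ]

if __name__ == "__main__":
    print(get_instances_with_min_vcpus(8))  # e.g. ['a1.2xlarge', 'a1.4xlarge', 'a1.metal']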
| [
[
[
50,
53
],
[
9123,
9126
]
],
[
[
8995,
9013
]
]
] |
#
# Tic Tac Toe
#
import numpy as np
from gym import spaces
WinMasks = [
[
[1,0,0],
[1,0,0],
[1,0,0],
],
[
[0,1,0],
[0,1,0],
[0,1,0],
],
[
[0,0,1],
[0,0,1],
[0,0,1],
],
[
[1,1,1],
[0,0,0],
[0,0,0],
],
[
[0,0,0],
[1,1,1],
[0,0,0],
],
[
[0,0,0],
[0,0,0],
[1,1,1],
],
[
[1,0,0],
[0,1,0],
[0,0,1],
],
[
[0,0,1],
[0,1,0],
[1,0,0],
]
]
WinMasks = np.array(WinMasks).reshape((-1,9))
class SingleAgentTicTacToeEnv(object):
NActions = 9
ObservationShape = (9,)
NState = 9
def __init__(self):
self.Board = np.zeros((9,))
self.action_space = spaces.Discrete(self.NActions)
high = np.ones((self.NActions,))
self.observation_space = spaces.Box(-high, high, dtype=np.float32)
def reset(self):
self.Done = False
self.Board[...] = 0.0
self.BoardHistory = []
self.Side = 1
self.FirstMove = True
return self.observation(self.Side), {"valid_actions":np.array([1,1,0,0,1,0,0,0,0], dtype=np.float32)}
def observation(self, side):
return self.Board * side
def step(self, action):
win = False
draw = False
side = self.Side
other_side = -side
color = side
reward = 0.0
done = False
if self.Board[action] != 0:
# invalid move
reward = -1.0
done = True
else:
self.Board[action] = side
self.BoardHistory.append(self.Board.reshape((3,3)).copy())
for win_mask in WinMasks:
masked = self.Board*color*win_mask
if np.sum(masked) == 3:
reward = 1.0
done = True
break
if np.all(self.Board != 0):
done = True # draw
self.Side = other_side
self.Done = done
self.Reward = reward
return self.observation(self.Side), reward, done, {"valid_actions":np.asarray(self.Board==0, dtype=np.float32)}
def render(self):
if self.Done:
last_move = -self.Side
history = self.BoardHistory
sep = "+---"*len(history) + "+"
lines = [sep]
for irow in (0,1,2):
line = "|"
for b in history:
row = "".join(".xo"[int(c)] for c in b[irow])
line += row + "|"
lines.append(line)
outcome = "draw"
if self.Reward:
outcome = "%s won" % (".xo"[int(last_move)])
lines.append(sep + " " + outcome)
print("\n".join(lines))
if __name__ == "__main__":
import random
def show_board(board):
sep = "+---"*3 + "+"
out = [sep]
for row in board.reshape((3,3)):
line = "| "
for x in row:
line += " OX"[int(x)] + " | "
out.append(line)
out.append(sep)
return "\n".join(out)
class Agent(object):
def __init__(self, side):
self.Side = side
self.Sign = "XO"[side]
self.Color = side*2-1
def reset(self):
pass
def action(self, reward, observation, available_actions):
print(f"{self.Sign}: action:", reward, observation, available_actions)
choices = [i for i, x in enumerate(available_actions) if x]
i = random.choice(choices)
return i
def reward(self, r):
#print(f"{self.Sign}: reward: {r}")
pass
def done(self, r, last_observation):
if r > 0:
print(f"===== {self.Sign} won")
elif r < 0:
print(f"===== {self.Sign} lost")
else:
print("===== draw")
class Callback(object):
def end_turn(self, agents, data):
print(show_board(data["board"]))
def end_episode(self, agents, data):
print("--- game over ---")
print(env.show_history(data["board_history"]))
x_agent = Agent(0)
y_agent = Agent(1)
env = TicTacToeEnv()
env.run([x_agent, y_agent], [Callback])
| [
[
[
26,
37
],
[
606,
608
],
[
796,
798
],
[
885,
887
],
[
974,
976
],
[
1216,
1218
],
[
1252,
1254
],
[
1892,
1894
],
[
2040,
2042
],
[
2264,
2266
],
[
2296,
2298
]
],
[
[
54,
60
],
[
839,
845
],
[
944,
950
]
],
[
[
62,
70
],
[
615,
623
]
],
[
[
595,
603
],
[
1812,
1820
]
],
[
[
648,
671
]
],
[
[
2996,
3002
],
[
3780,
3786
]
],
[
[
3016,
3026
],
[
4292,
4302
]
],
[
[
3323,
3328
],
[
4498,
4503
],
[
4521,
4526
]
],
[
[
4205,
4213
],
[
4593,
4601
]
],
[
[
4488,
4495
],
[
4573,
4580
]
],
[
[
4511,
4518
],
[
4582,
4589
]
],
[
[
4539,
4542
],
[
4564,
4567
],
[
4434,
4437
]
]
] |
###################################################################
""" Summary: Class and Methods for deriving MCSS based MMP's
About: Derive a matched pair based MCSS from a pair molecules
To do: - extend the method enumerate_fragment_properties to also
enumerate self.mol_smi_dict as this would allow the addition
of a flag '-p' that prints out whole molecule props alongside
MCSS and therefore compute %molecule that the MCSS covers
- could use other descriptors from IW code to get MCSS via
bond count not #Atoms or
- Should move iterators in process_mcss_list_to_string to be numeric
and store numeric ID's in self.largest_mcs_mmp_double/single
- could allow further switched to change behaviour of tie break
where single/double or double alone give tie break MCSS
[connected substructures versus disconnected or both/either]
- Extension to triple cut would allow improved search/match e.g.:
N1(C(c2c(cc3c(c2)OCO3)CC1)c4cc(c(c(c4)OC)O)OC)C(=O)OC CHEMBL311765
N1(C(c2c(cc(cc2)O)CC1)c3ccc(cc3)OCCN4CCCC4)C(=O)OCC CHEMBL94080
"""
###################################################################
import logging
import csv
import os
import sys
import unittest
import tempfile
from builtins import range
from mmp.mmp_data_objects import MMPDataObjectClass
if 'LILLYMOL_HOME' in os.environ:
import pybase.pyopmo as pymo
else:
import pybase.pymo as pymo
class MMPbasedMCSSObjectClass(MMPDataObjectClass):
def __init__(self, logger_object):
"""
Example usage:
mmplogger = logging.getLogger('lillymol_file_logger')
logging.disable(logging.CRITICAL)
my_mmp_mcss_object = MMPbasedMCSSObjectClass(mmplogger)
"""
MMPDataObjectClass.__init__(self, logger_object)
self.logger = logger_object
if len(logging.Logger.manager.loggerDict) < 1:
# exit with system status 1 and custom error
sys.exit("Invalid or no logger object passed to MMPObjectClass. Please create \
and pass a logger and set to use logging.disable if you don't want logging")
# this is used for storing the largest MCS MMP for given pair
self.largest_mcs_mmp_result = {}
self.ref_smi_props = {}
def clean_out_data_mcss_obj(self):
"""Method to clean out all objects in class"""
self.clean_out_data()
self.mcs_mmp.clear()
def enumerate_fragment_properties(self):
"""Writes out the ref_smi_dict to disk, calculates natoms, returns data to self.ref_smi_props
Some complexities in method such as double cut fragments (iw_descr only calcs largest frag)"""
frag_smi_file = tempfile.NamedTemporaryFile(delete=False, suffix='.smi')
frag_smi_props_out = tempfile.NamedTemporaryFile(delete=False)
with open(frag_smi_file.name, "w") as f:
for item in self.refsmi_dict:
if isinstance(item, int):
# can't see an easy way to do this except string compare, [1H] causes iw_descr to crash out
if self.refsmi_dict[item] != '[1H]':
f.write(self.refsmi_dict[item]+" "+str(item)+"\n")
# run pymo.iwdescr
self.logger.info("Running pymo.iwdescr on %s smi with in:%s, out:%s" %
(len(self.refsmi_dict), frag_smi_file.name, frag_smi_props_out.name))
exit_status = pymo.iwdescr(frag_smi_file.name, frag_smi_props_out.name, params_dict={'-l': '', '-v': ''},
loggero=self.logger)
self.logger.debug("Ran iwdescr with exit status %s" % exit_status)
with open(frag_smi_props_out.name, "r") as csv_file:
reader = csv.reader(csv_file, delimiter=' ')
i = -1
for row in reader:
i += 1
# if header row, append headers
if i == 0:
if row[1] != 'w_natoms':
self.logger.warn("When this was written, NATOMs was in array position 1 (zero indexed) with "
"column title w_natoms. Now it's not, it's: %s" % row[1])
sys.exit("When this was written, NATOMs was in array position 1 (zero indexed) with column "
"title w_natom. Now it's not, it's: %s" % row[1])
continue
# we trust there is only one entry per id
# print row[0], row[1]
self.ref_smi_props[int(row[0])] = int(row[1])
frag_smi_props_out.close()
self.logger.debug("Completed load of %s mol props from dict of %s from file %s" %
(len(self.ref_smi_props), len(self.refsmi_dict)/2, frag_smi_props_out.name))
def get_largest_mcs_pairs(self, out_file, cut_type, mdc_atm_soft=None, mdc_atm_soft_threshold=None,
mdc_atm_hard=None):
"""Method to print out a single smi - smi pair from the input CSV with data differences. Selection of the
exact matched pair for a given smi - smi combination is based on the largest Maximum Common Substructure
which equates to the MMP with the smallest MWT/#Atoms difference across all MMP's for that smi/smi combo
out_file:
The user specified output file
cut_type:
Specifies the type of fragmentation required. Allowed values are SINGLE,
DOUBLE or BOTH. Currently this class does not support anything greater than
double cut fragmentation
mdc_atm_hard:
max double cut atom cutoff (hard)
Never consider double cut context fragments where one half has num_atoms <= mdc_atm_hard
i.e.: this is a hard cutoff filter implemented during dicer parsing
mdc_atm_soft:
max double cut atom cutoff (soft)
* must be used with mdc_atm_soft_threshold
When double cut is greater than single, if one part of double context has num_atoms <= mdc_atm_soft and
total double cut atom <= single cut atoms + mdc_atm_soft_threshold then discard
mdc_atm_soft_threshold:
max double cut atom cutoff threshold (soft)
* must be used with mdc_atm_soft
This gets added to single cut num atoms each comparison that's done, if and when mdc_atm_soft is set
see details of mdc_atm_soft
Example usage:
# give me a CSV named my_output.pairs of all MCS based pairs:
my_mmp_object.get_largest_mcs_pairs('myoutput.csv', 'BOTH', 'DICER')
# give me a CSV of only the DOUBLE cut MCS based pairs with RDKit attachment points:
my_mmp_object.get_largest_mcs_pairs('myoutput.csv', 'DOUBLE', 'RDKIT')
"""
if (mdc_atm_soft is not None and mdc_atm_soft_threshold is None) or\
(mdc_atm_soft is None and mdc_atm_soft_threshold is not None):
sys.exit("Error, mdc_atm_soft and mdc_atm_soft_threshold must be specified together.")
def process_mcss_list_to_string(prefix, input_list):
"""sub method to build a printable string from input list of specific structure"""
out_string = ''
num_of_entries = len(input_list)
if num_of_entries > 4:
for i_ in range(0, num_of_entries, 4):
out_string = out_string + prefix + "_" + str((i_/4)+1) + "," + str(molid_L) + "," + str(molid_R)
out_string = out_string + "," + str(sum(input_list[0 + i_])) + "," + str(input_list[1 + i_]) + ","
out_string = out_string + str(input_list[2 + i_]) + "," + str(input_list[3 + i_])
out_string += "\n"
else:
if len(input_list[1]) > 1:
ctx_smi = self.refsmi_dict[input_list[1][0]] + "." + self.refsmi_dict[input_list[1][1]]
else:
ctx_smi = self.refsmi_dict[input_list[1][0]]
out_string = prefix + "," + str(molid_L) + "," + str(molid_R) + ","
out_string = out_string + str(sum(input_list[0])) + "," + ctx_smi + ","
out_string = out_string + str(self.refsmi_dict[input_list[2]]) + "," \
+ str(self.refsmi_dict[input_list[3]])
out_string += "\n"
return out_string
def disambiguate_double_list(input_list):
"""sub method to untangle double cut tie break cases"""
num_of_entries = len(input_list)
filtered_list = []
# The tie code should have only saved the example with the largest 'smallest fragment' size
# so now we just take the first example where atom numbering [1 before [2
# Theoretically, if two different examples of a double cut fragmentation pattern exist with the same number
# of atoms *in both parts* of the context, then there is another tie break here. e.g.:
# num_atoms in context = (2,10) should always appear not (1,11) but can't disentangle many (1,11)
# Decided not to handle this and instead just take the first one with the ordered numbering
for i_ in range(0, num_of_entries, 4):
# only use if the isomeric label is the right way round, [1 before [2
if '[1' in self.refsmi_dict[input_list[1 + i_][0]]:
filtered_list = input_list[(0 + i_): (4 + i_)]
else:
continue
return filtered_list
def remove_atom_num_dupes(input_list):
"""sub method to get only 1 example of simple isomeric numbering flip"""
# only use if the isomeric label is the right way round, [1 before [2
if '[1' in self.refsmi_dict[input_list[1][0]]:
# take the first 4 items
output_list = input_list[:4]
else:
# just take the last 4 items
output_list = input_list[-4:]
return output_list
self.logger.info('Opening output file for write: %s' % out_file)
# check cut_type, convert to int
if cut_type.upper() == 'DOUBLE':
# confusing but faster later
cut_type_id = 3
elif cut_type.upper() == 'BOTH':
# confusing but faster later
cut_type_id = 2
elif cut_type.upper() == 'SINGLE':
cut_type_id = 1
else:
self.logger.warn('cut_type specification is incorrect, using single cut: %s' % cut_type.upper())
cut_type_id = 1
# fail if both single_pairs_dict and double_pairs_dict are empty
if (len(self.single_pairs_dict) == 0) and (len(self.double_pairs_dict) == 0):
self.logger.debug('No data found in single_pairs_dict and/or double_pairs_dict, expect no results')
# sys.exit("Error: no data found in single_pairs_dict and/or double_pairs_dict, nothing to find and write")
#
# Here we build data structures of type:
# self.largest_mcs_mmp_result[(molid_L, molid_R)] = [(#atoms, #atoms or None),
# (context_id, context_id or None), frag_Left_id, frag_Right_id]
#
# single - this is easy as we only keep/store the one with the greatest number of atoms
if cut_type_id <= 2:
for molid_L, molid_R, ctx_id, frag_L_id, frag_R_id in \
self.iterator_single_pairs_dict_numeric(inc_attachpt=False):
if (molid_L, molid_R) in self.largest_mcs_mmp_result:
if self.largest_mcs_mmp_result[(molid_L, molid_R)][0][0] <= self.ref_smi_props[ctx_id]:
if self.largest_mcs_mmp_result[(molid_L, molid_R)][0][0] == self.ref_smi_props[ctx_id]:
self.largest_mcs_mmp_result[(molid_L, molid_R)].extend(
[(self.ref_smi_props[ctx_id], ), (ctx_id, ), frag_L_id, frag_R_id])
else:
self.largest_mcs_mmp_result[(molid_L, molid_R)] = [
(self.ref_smi_props[ctx_id], ), (ctx_id, ), frag_L_id, frag_R_id]
else:
self.largest_mcs_mmp_result[(molid_L, molid_R)] = [
(self.ref_smi_props[ctx_id], ), (ctx_id, ), frag_L_id, frag_R_id]
# now build the final results on the fly
# double - for each one we compare against what we already have in self.largest_mcs_mmp_result
ctx_natoms = None
if cut_type_id >= 2:
for molid_L, molid_R, ctx1_id, ctx2_id, frag_L_id, frag_R_id in \
self.iterator_double_pairs_dict_numeric(inc_attachpt=False):
#
if ctx1_id in self.ref_smi_props:
ctx_natoms = (self.ref_smi_props[ctx1_id], )
else:
ctx1_smi = self.refsmi_dict[ctx1_id]
ctx1_smi = ctx1_smi.replace("[1", "[9")
ctx1_smi = ctx1_smi.replace("[2", "[1")
ctx1_smi = ctx1_smi.replace("[9", "[2")
try:
ctx_natoms = (self.ref_smi_props[self.refsmi_dict[ctx1_smi]], )
except:
print("ERR >>>")
print(("{} {} {} {} {} {}".format(molid_L, molid_R, ctx1_id, ctx2_id, frag_L_id, frag_R_id)))
print(("{} {} {}".format(ctx1_id, ctx1_smi, self.refsmi_dict[ctx1_smi])))
print("")
if ctx2_id in self.ref_smi_props:
ctx_natoms = ctx_natoms + (self.ref_smi_props[ctx2_id], )
else:
ctx2_smi = self.refsmi_dict[ctx2_id]
ctx2_smi = ctx2_smi.replace("[1", "[9")
ctx2_smi = ctx2_smi.replace("[2", "[1")
ctx2_smi = ctx2_smi.replace("[9", "[2")
ctx_natoms = ctx_natoms + (self.ref_smi_props[self.refsmi_dict[ctx2_smi]], )
# If the indicator flag check_all_context is set to true we need to pre-filter all ctx fragments
# to ensure they are greater than or equal to the specified limit for mdc_atm_hard (maximum double
# cut atoms hard limit). This is a crude filter and could remove valid double cut MCSS.
if mdc_atm_hard is not None:
if ctx_natoms[0] <= mdc_atm_hard:
continue
elif ctx_natoms[1] <= mdc_atm_hard:
continue
#
# Main
# have we seen this smi - smi pair before?
if (molid_L, molid_R) in self.largest_mcs_mmp_result:
# get the number of atoms in the context
num_atoms_existing = self.largest_mcs_mmp_result[(molid_L, molid_R)][0]
if len(num_atoms_existing) > 1:
total_num_atoms_existing = sum(num_atoms_existing)
else:
total_num_atoms_existing = num_atoms_existing[0]
total_num_atoms_new = sum(ctx_natoms)
if total_num_atoms_new > total_num_atoms_existing:
# if it is a double and we have a min fragment setting
if mdc_atm_soft is not None:
# if it falls below the threshold at which we apply this min frag setting
if total_num_atoms_new <= (total_num_atoms_existing + mdc_atm_soft_threshold):
# only keep if both frag sizes are legal
if '[1' in self.refsmi_dict[ctx1_id]:
if (ctx_natoms[0] > mdc_atm_soft) and (ctx_natoms[1] > mdc_atm_soft):
self.largest_mcs_mmp_result[(molid_L, molid_R)] = \
[ctx_natoms, (ctx1_id, ctx2_id), frag_L_id, frag_R_id]
# above threshold so keep anyway
else:
if '[1' in self.refsmi_dict[ctx1_id]:
self.largest_mcs_mmp_result[(molid_L, molid_R)] = \
[ctx_natoms, (ctx1_id, ctx2_id), frag_L_id, frag_R_id]
else:
if '[1' in self.refsmi_dict[ctx1_id]:
self.largest_mcs_mmp_result[(molid_L, molid_R)] = \
[ctx_natoms, (ctx1_id, ctx2_id), frag_L_id, frag_R_id]
# tie-break
elif total_num_atoms_new == total_num_atoms_existing:
# single always wins over double, so only consider this if existing is double
# double cut tie breaks get disambiguated later using custom function
if len(num_atoms_existing) == 1:
continue
else:
# consider the size of the 'smallest fragment' and add if same, replace if bigger,
# drop if smaller
if min(ctx_natoms) > min(num_atoms_existing):
if '[1' in self.refsmi_dict[ctx1_id]:
self.largest_mcs_mmp_result[(molid_L, molid_R)] = \
[ctx_natoms, (ctx1_id, ctx2_id), frag_L_id, frag_R_id]
elif min(ctx_natoms) == min(num_atoms_existing):
self.largest_mcs_mmp_result[(molid_L, molid_R)].extend(
[ctx_natoms, (ctx1_id, ctx2_id), frag_L_id, frag_R_id])
else:
# don't store as we have a better context with a larger 'smallest fragment'
continue
# double cut context must be smaller than what we already have so discard this new one
else:
continue
else:
# new result, case where we only have a double cut MCSS so add it!
if '[1' in self.refsmi_dict[ctx1_id]:
self.largest_mcs_mmp_result[(molid_L, molid_R)] = [ctx_natoms, (ctx1_id, ctx2_id),
frag_L_id, frag_R_id]
with open(out_file, "w") as final_out:
final_out.write('CUT_TYPE,MOL_ID_L,MOL_ID_R,NATOMS,MCSS,FRAG_L,FRAG_R\n')
# do single cut first as these take precedence above a double
for (molid_L, molid_R) in self.largest_mcs_mmp_result:
list_length = len(self.largest_mcs_mmp_result[(molid_L, molid_R)])
# the list self.largest_mcs_mmp_result[(molid_L, molid_R)] contains an ordered list of items
# the first 4 are (1) a tuple of the num_atoms (2) fragment (3&4) context in two parts
# Therefore if the list is greater than 8 items it means we have more than one double
# cut that we need to consider, possibly as a double cut tie break. We do not consider the
# case where there are 8 items as we know this will be two identical fragmentation patterns
# with differing isomeric numbering on the atom attachment points therefore we use >8 not >=8
if list_length > 8:
if len(self.largest_mcs_mmp_result[(molid_L, molid_R)][0]) == 1:
# disambiguate single cut list
final_out.write(process_mcss_list_to_string('SINGLE', self.largest_mcs_mmp_result[
(molid_L, molid_R)][0:4]))
else:
# print("Double won (a): ", molid_L, molid_R, self.largest_mcs_mmp_result[(molid_L, molid_R)])
new_list = disambiguate_double_list(self.largest_mcs_mmp_result[(molid_L, molid_R)])
final_out.write(process_mcss_list_to_string('DOUBLE', new_list))
elif list_length == 4:
# print("Single won (a): ", molid_L, molid_R, self.largest_mcs_mmp_result[(molid_L, molid_R)])
final_out.write(process_mcss_list_to_string('SINGLE', self.largest_mcs_mmp_result[
(molid_L, molid_R)]))
else:
# print("Double wins (b): ", molid_L, molid_R, self.largest_mcs_mmp_result[(molid_L, molid_R)])
# need to remove atom numbering dupes then print
new_list = remove_atom_num_dupes(self.largest_mcs_mmp_result[(molid_L, molid_R)])
final_out.write(process_mcss_list_to_string('DOUBLE', new_list))
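# A usage sketch mirroring the get_largest_mcs_pairs docstring above; the file
# names are placeholders and the build/enumerate/write sequence follows the one
# exercised by the unit tests below.
def example_mcss_run(input_smi_file, output_csv):
    mmplogger = logging.getLogger('lillymol_file_logger')
    logging.disable(logging.CRITICAL)
    mcss_object = MMPbasedMCSSObjectClass(mmplogger)
    mcss_object.build_from_dicer(input_smi_file, 'BOTH', 'NONE')
    mcss_object.enumerate_fragment_properties()
    mcss_object.get_largest_mcs_pairs(output_csv, 'BOTH')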
class _TestMMPbasedMCSSObjectClass(unittest.TestCase):
"""Test class for MMPDataObjectClass(object) written to use pythons unittest
Example usage:
python mmp_mcss_objects.py
coverage run mmp_mcss_objects.py
coverage report mmp_mcss_objects.py
"""
def setUp(self):
"""Instantiate temp file names, test data objects that get written to temp files
a silent logger object (needed to instantiate class) and the mmp object we'll test"""
self.maxDiff = None
# setup test data location use tempfile.NamedTemporaryFile(delete=False) to persist data on disk
self.temp_file_input_smi_01 = tempfile.NamedTemporaryFile(delete=False, suffix=".smi",
encoding='utf-8', mode='wt')
self.temp_file_input_smi_03 = tempfile.NamedTemporaryFile(delete=False, suffix=".smi",
encoding='utf-8', mode='wt')
self.temp_file_output_pairs = tempfile.NamedTemporaryFile(delete=False)
# setup a logger object
self.mmplogger = logging.getLogger('mmpobjectclass_testlogger')
# logging.disable(logging.CRITICAL)
# create empty mmp object
self.test_mmp_mcss_object = MMPbasedMCSSObjectClass(self.mmplogger)
# data set for use in testing input
self.test_dataset_goldeninput_smi_01 = {
# The following represent synthetic data, analogues of CHEMBL1382609
# https://www.ebi.ac.uk/chembl/compound_report_card/CHEMBL1382609/
# 1. substituents are added to the pyrazole ring to generate side chain MMPs
# H on CHEMBL1382609 between two methyls is changed to Br, F, C, I to
# visually see the change in the smiles string (avoiding Cl as already present)
# e.g.: N1C(=C(Br)C(=N1)C)C
# 2. core ring system is modified (phenyl to pyridine) to see ring switch MMP's
# Presence/Absence of Pyridine-N and N-positional isomerism in Cl-Ph ring
# e.g.: C2=NC(=CS2)C2=CC=C(Cl)C=C2 + addition of N ->
# C2=NC(=CS2)C2=CN=C(Cl)C=C2 + move N around ring ->
# C2=NC(=CS2)C2=NC=C(Cl)C=C2
# for 1,2 single wins
'001': 'N1(C2=NC(=CS2)C2=CC=C(Cl)C=C2)C(=C(Br)C(=N1)C)C',
'002': 'N1(C2=NC(=CS2)C2=CC=C(Cl)C=C2)C(=C(F)C(=N1)C)C',
# for 2,5 double wins tie
'003': 'N1(C2=NC(=CS2)C2=CN=C(Cl)C=C2)C(=C(F)C(=N1)C)C',
# The following represent synthetic data, analogues of CHEMBL1341352
            # for 1341352 and its synthetic unsubstituted analogue there is no double
# https://www.ebi.ac.uk/chembl/compound_report_card/CHEMBL1341352/
'1341352': 'Cc1cc(nn1CC(=O)NCc2ccccc2)C(F)(F)F',
'004': 'c1cc(nn1CC(=O)NCc2ccccc2)',
# more double cut only
# https://www.ebi.ac.uk/chembl/compound_report_card/CHEMBL6211
# https://www.ebi.ac.uk/chembl/compound_report_card/CHEMBL6232
'6211': 'O=C(OCC1N(C(=O)c2cc(c(OC)c(c2)OC)OC)CCN(C1)C(=O)c1cc(c(OC)c(OC)c1)OC)CCCCCCC',
'6232': 'O=C(N1C(CN(C(=O)c2cc(c(OC)c(c2)OC)OC)CC1)COC(=O)CC(C)(C)C)c1cc(c(OC)c(OC)c1)OC'
}
self.test_dataset_goldeninput_smi_03 = {
# repeat of above
'001': 'N1(C2=NC(=CS2)C2=CC=C(Cl)C=C2)C(=C(Br)C(=N1)C)C',
'002': 'N1(C2=NC(=CS2)C2=CC=C(Cl)C=C2)C(=C(F)C(=N1)C)C',
}
# all smiles are output from above input as either a repeat smiles or a fragment of them
self.test_dataset_golden_output_01 = {'CUT_TYPE,MOL_ID_L,MOL_ID_R,NATOMS,MCSS,FRAG_L,FRAG_R': None,
'SINGLE,1,2,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1BrH],[1FH]': None,
'SINGLE,2,1,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1FH],[1BrH]': None,
'DOUBLE,2,3,14,[1ClH].Fc1c([n](c2sc[2cH][n]2)[n]c1C)C,[1cH]1cc[2cH]cc1,[n]1[1cH]cc[2cH]c1': None,
'DOUBLE,3,2,14,[1ClH].Fc1c([n](c2sc[2cH][n]2)[n]c1C)C,[n]1[1cH]cc[2cH]c1,[1cH]1cc[2cH]cc1': None,
'SINGLE,1341352,4,11,O=C(NCc1ccccc1)[1CH3],Cc1[1nH][n]c(C(F)(F)F)c1,[1nH]1[n]ccc1': None,
'SINGLE,4,1341352,11,O=C(NCc1ccccc1)[1CH3],[1nH]1[n]ccc1,Cc1[1nH][n]c(C(F)(F)F)c1': None,
'DOUBLE,6211,6232,40,[1CH4].[2CH3]C(=O)OCC1N(C(=O)c2cc(c(OC)c(c2)OC)OC)CCN(C1)C(=O)c1cc(c(OC)c(OC)c1)OC,[1CH3]CCC[2CH3],C[12CH2]C': None,
'DOUBLE,6232,6211,40,[1CH4].[2CH3]C(=O)OCC1N(C(=O)c2cc(c(OC)c(c2)OC)OC)CCN(C1)C(=O)c1cc(c(OC)c(OC)c1)OC,C[12CH2]C,[1CH3]CCC[2CH3]': None}
self.test_dataset_golden_output_02 = {'CUT_TYPE,MOL_ID_L,MOL_ID_R,NATOMS,MCSS,FRAG_L,FRAG_R': None,
'SINGLE,1,2,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1BrH],[1FH]': None,
'SINGLE,2,1,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1FH],[1BrH]': None,
'SINGLE,2,3,13,Fc1c([n](c2sc[1cH][n]2)[n]c1C)C,Clc1cc[1cH]cc1,Clc1[n]c[1cH]cc1': None,
'SINGLE,3,2,13,Fc1c([n](c2sc[1cH][n]2)[n]c1C)C,Clc1[n]c[1cH]cc1,Clc1cc[1cH]cc1': None,
'SINGLE,1341352,4,11,O=C(NCc1ccccc1)[1CH3],Cc1[1nH][n]c(C(F)(F)F)c1,[1nH]1[n]ccc1': None,
'SINGLE,4,1341352,11,O=C(NCc1ccccc1)[1CH3],[1nH]1[n]ccc1,Cc1[1nH][n]c(C(F)(F)F)c1': None,
'SINGLE,6211,6232,39,[1CH3]C(=O)OCC1N(C(=O)c2cc(c(OC)c(c2)OC)OC)CCN(C1)C(=O)c1cc(c(OC)c(OC)c1)OC,[1CH3]CCCCC,C[1CH](C)C': None,
'SINGLE,6232,6211,39,[1CH3]C(=O)OCC1N(C(=O)c2cc(c(OC)c(c2)OC)OC)CCN(C1)C(=O)c1cc(c(OC)c(OC)c1)OC,C[1CH](C)C,[1CH3]CCCCC': None}
self.test_dataset_golden_output_03 = {'CUT_TYPE,MOL_ID_L,MOL_ID_R,NATOMS,MCSS,FRAG_L,FRAG_R': None,
'SINGLE,1,2,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1BrH],[1FH]': None,
'SINGLE,2,1,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1FH],[1BrH]': None,
'DOUBLE,1,2,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1BrH],[1FH]': None,
'DOUBLE,2,1,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1FH],[1BrH]': None}
# write test data to temp file (smi)
for smi_id, smi in list(self.test_dataset_goldeninput_smi_01.items()):
self.temp_file_input_smi_01.write(smi + " " + smi_id + "\n")
self.temp_file_input_smi_01.close()
# write test data to temp file (smi)
for smi_id, smi in list(self.test_dataset_goldeninput_smi_03.items()):
self.temp_file_input_smi_03.write(smi + " " + smi_id + "\n")
self.temp_file_input_smi_03.close()
# container for results data
self.test_dataset_testresults = {}
def tearDown(self):
"""Tear down object for clean reuse in further tests"""
# clean out the object
self.test_mmp_mcss_object.clean_out_data()
# clean out the temp data store
self.test_dataset_testresults.clear()
os.remove(self.temp_file_input_smi_01.name)
def test_get_largest_mcs_pairs_with_diff(self):
"""Test method to get largest MCS MMP for given smi - smi pair"""
# 6. full build then write of pairs to file, but only for a single named column
self.test_mmp_mcss_object.build_from_dicer(self.temp_file_input_smi_01.name, 'BOTH', 'NONE')
self.test_mmp_mcss_object.enumerate_fragment_properties()
self.test_mmp_mcss_object.get_largest_mcs_pairs(self.temp_file_output_pairs.name, 'BOTH')
# now read it back into temp object and check it's what we wrote out!
test_results_filehandle = open(self.temp_file_output_pairs.name, 'r')
for line in test_results_filehandle:
line = line.rstrip('\r')
line = line.rstrip('\n')
self.test_dataset_testresults[line] = None
test_results_filehandle.close()
#print(self.test_dataset_testresults)
self.assertEqual(self.test_dataset_golden_output_01, self.test_dataset_testresults)
def test_get_largest_mcs_pairs_mdc_atm_hard(self):
"""Test method to get largest MCS MMP for given smi - smi pair"""
# 6. full build then write of pairs to file, but only for a single named column
self.test_mmp_mcss_object.build_from_dicer(self.temp_file_input_smi_01.name, 'BOTH', 'NONE')
self.test_mmp_mcss_object.enumerate_fragment_properties()
self.test_mmp_mcss_object.get_largest_mcs_pairs(self.temp_file_output_pairs.name, 'BOTH', mdc_atm_hard=4)
# now read it back into temp object and check it's what we wrote out!
test_results_filehandle = open(self.temp_file_output_pairs.name, 'r')
for line in test_results_filehandle:
line = line.rstrip('\r')
line = line.rstrip('\n')
self.test_dataset_testresults[line] = None
test_results_filehandle.close()
#print(self.test_dataset_testresults)
self.assertEqual(self.test_dataset_golden_output_02, self.test_dataset_testresults)
def test_get_largest_mcs_pairs_mdc_atm_soft(self):
"""Test method to get largest MCS MMP for given smi - smi pair"""
# 6. full build then write of pairs to file, but only for a single named column
self.test_mmp_mcss_object.build_from_dicer(self.temp_file_input_smi_03.name, 'BOTH', 'NONE')
self.test_mmp_mcss_object.enumerate_fragment_properties()
#
self.test_mmp_mcss_object.get_largest_mcs_pairs(self.temp_file_output_pairs.name, 'BOTH')
# now read it back into temp object and check it's what we wrote out!
test_results_filehandle = open(self.temp_file_output_pairs.name, 'r')
for line in test_results_filehandle:
line = line.rstrip('\r')
line = line.rstrip('\n')
self.test_dataset_testresults[line] = None
test_results_filehandle.close()
self.test_mmp_mcss_object.get_largest_mcs_pairs(self.temp_file_output_pairs.name, 'BOTH', mdc_atm_soft=3,
mdc_atm_soft_threshold=4)
# now read it back into temp object and check it's what we wrote out!
test_results_filehandle = open(self.temp_file_output_pairs.name, 'r')
for line in test_results_filehandle:
line = line.rstrip('\r')
line = line.rstrip('\n')
self.test_dataset_testresults[line] = None
test_results_filehandle.close()
#print(self.test_dataset_testresults)
self.assertEqual(self.test_dataset_golden_output_03, self.test_dataset_testresults)
if __name__ == '__main__':
unittest.main()
| [
[
[
1239,
1246
],
[
1928,
1935
],
[
22461,
22468
]
],
[
[
1254,
1257
],
[
3827,
3830
]
],
[
[
1265,
1267
],
[
1415,
1417
],
[
28934,
28936
]
],
[
[
1275,
1278
],
[
2037,
2040
],
[
4314,
4317
],
[
7098,
7101
]
],
[
[
1286,
1294
],
[
21359,
21367
],
[
32583,
32591
]
],
[
[
1302,
1310
],
[
2790,
2798
],
[
2876,
2884
],
[
21981,
21989
],
[
22171,
22179
],
[
22361,
22369
]
],
[
[
1333,
1338
],
[
7495,
7500
],
[
9407,
9412
]
],
[
[
1373,
1391
],
[
1529,
1547
],
[
1827,
1845
]
],
[
[
1438,
1459
],
[
3520,
3524
]
],
[
[
1477,
1496
],
[
3520,
3524
]
],
[
[
1505,
1528
],
[
22623,
22646
]
],
[
[
21330,
21358
]
]
] |
import os
import json
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
class Action:
def __init__(self, card, config):
config.pop("action", None)
self.card = card
self.config = config
def env_vars_for_object(self, config, prefix):
env_vars = {}
config.pop("action", None)
config.pop("id", None)
for key, value in config.items():
if value and isinstance(value, dict):
nested_env_vars = self.env_vars_for_object(
value, "{}_{}".format(prefix, key.upper())
)
env_vars = {**env_vars, **nested_env_vars}
else:
env_vars["{}_{}".format(prefix, key.upper())] = value
return env_vars
def env_vars(self):
with open(CURRENT_DIR + "/../../config/config.json", "r") as f:
global_config = json.load(f)
env_vars = self.env_vars_for_object(self.card, "CARD")
env_vars["magic_cards_room"] = global_config["room"]
prefix = self.__class__.__name__.replace("Action", "").upper()
return {**env_vars, **self.env_vars_for_object(self.config, prefix)}
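# --- Illustrative note (not part of the original module) ---
# A rough sketch of how env_vars_for_object flattens a nested config dict into
# prefixed variable names; the card/config values below are made up for the example.
#
# >>> action = Action({"uid": "abc123"}, {"action": "play"})
# >>> action.env_vars_for_object({"media": {"url": "http://example", "loop": True}}, "PLAY")
# {'PLAY_MEDIA_URL': 'http://example', 'PLAY_MEDIA_LOOP': True}
#
# Nested dicts recurse with an extended prefix ("PLAY" -> "PLAY_MEDIA"), while the
# reserved "action"/"id" keys are popped before flattening.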
class ChromecastAction(Action):
def __init__(self, card, config, chromecast):
super().__init__(card, config)
self.chromecast = chromecast
| [
[
[
7,
9
],
[
37,
39
],
[
53,
55
]
],
[
[
17,
21
],
[
897,
901
]
],
[
[
23,
34
],
[
815,
826
]
],
[
[
89,
95
],
[
1209,
1215
]
],
[
[
1192,
1208
]
]
] |
from office365.runtime.client_value_collection import ClientValueCollection
from office365.runtime.queries.service_operation_query import ServiceOperationQuery
from office365.runtime.resource_path import ResourcePath
from office365.sharepoint.base_entity import BaseEntity
from office365.sharepoint.tenant.administration.hubSiteProperties import HubSiteProperties
from office365.sharepoint.tenant.administration.secondary_administrators_fields_data import \
SecondaryAdministratorsFieldsData
from office365.sharepoint.tenant.administration.secondary_administrators_info import SecondaryAdministratorsInfo
from office365.sharepoint.tenant.administration.site_properties import SiteProperties
from office365.sharepoint.tenant.administration.site_properties_collection import SitePropertiesCollection
from office365.sharepoint.tenant.administration.sitePropertiesEnumerableFilter import SitePropertiesEnumerableFilter
from office365.sharepoint.tenant.administration.spo_operation import SpoOperation
class Tenant(BaseEntity):
def __init__(self, context):
super().__init__(context, ResourcePath("Microsoft.Online.SharePoint.TenantAdministration.Tenant"),
"Microsoft.Online.SharePoint.TenantAdministration")
def get_site_secondary_administrators(self, site_id):
"""
Gets site collection administrators
:type site_id: str
"""
return_type = ClientValueCollection(SecondaryAdministratorsInfo)
payload = SecondaryAdministratorsFieldsData(site_id)
qry = ServiceOperationQuery(self, "GetSiteSecondaryAdministrators", None, payload,
"secondaryAdministratorsFieldsData", return_type)
self.context.add_query(qry)
return return_type
def set_site_secondary_administrators(self, site_id, emails, names=None):
"""
Sets site collection administrators
:type names: list[str] or None
:type emails: list[str]
:type site_id: str
"""
payload = SecondaryAdministratorsFieldsData(site_id, emails, names)
qry = ServiceOperationQuery(self, "SetSiteSecondaryAdministrators", None, payload,
"secondaryAdministratorsFieldsData", None)
self.context.add_query(qry)
return self
def register_hub_site(self, site_url):
"""
Registers an existing site as a hub site.
:param str site_url:
:return:
"""
return_type = HubSiteProperties(self.context)
params = {"siteUrl": site_url}
qry = ServiceOperationQuery(self, "RegisterHubSite", None, params, None, return_type)
self.context.add_query(qry)
return return_type
def unregister_hub_site(self, siteUrl):
"""
Unregisters a hub site so that it is no longer a hub site.
:param str siteUrl:
:return:
"""
params = {"siteUrl": siteUrl}
qry = ServiceOperationQuery(self, "UnregisterHubSite", None, params, None, None)
self.context.add_query(qry)
return self
def create_site(self, site_create_props):
"""Queues a site collection for creation with the specified properties.
:param SiteCreationProperties site_create_props:
A SiteCreationProperties object that contains the initial properties
of the new site collection.
"""
result = SpoOperation(self.context)
qry = ServiceOperationQuery(self, "CreateSite", None, site_create_props, "siteCreationProperties", result)
self.context.add_query(qry)
return result
def remove_site(self, site_url):
"""Deletes the site with the specified URL
:param str site_url: A string representing the URL of the site.
"""
result = SpoOperation(self.context)
qry = ServiceOperationQuery(self, "removeSite", [site_url], None, None, result)
self.context.add_query(qry)
return result
def remove_deleted_site(self, site_url):
pass
def restore_deleted_site(self, site_url):
pass
def get_site_properties_by_url(self, url, include_detail):
"""
:param str url: A string that represents the site URL.
:param bool include_detail: A Boolean value that indicates whether to include all of the SPSite properties.
"""
site_props = SiteProperties(self.context)
self._sites.add_child(site_props)
payload = {
'url': url,
'includeDetail': include_detail
}
qry = ServiceOperationQuery(self, "getSitePropertiesByUrl", None, payload, None, site_props)
self.context.add_query(qry)
return site_props
def get_site_properties_from_sharepoint_by_filters(self, _filter, start_index=0, include_detail=False):
"""
:param bool include_detail:
:param int start_index:
:param str _filter:
"""
site_props_col = SitePropertiesCollection(self.context)
qry = ServiceOperationQuery(self, "getSitePropertiesFromSharePointByFilters",
None,
SitePropertiesEnumerableFilter(_filter, start_index, include_detail),
"speFilter",
site_props_col)
self.context.add_query(qry)
return site_props_col
@property
def root_site_url(self):
"""
:rtype: str or None
"""
return self.properties.get('RootSiteUrl', None)
@property
def _sites(self):
"""Gets a collection of sites."""
if self.is_property_available('sites'):
return self.properties['sites']
else:
return SitePropertiesCollection(self.context, ResourcePath("sites", self.resource_path))
| [
[
[
54,
75
],
[
1424,
1445
]
],
[
[
138,
159
],
[
1550,
1571
],
[
2112,
2133
],
[
2596,
2617
],
[
2973,
2994
],
[
3472,
3493
],
[
3863,
3884
],
[
4586,
4607
],
[
5043,
5064
]
],
[
[
204,
216
],
[
1097,
1109
],
[
5826,
5838
]
],
[
[
262,
272
],
[
1016,
1026
]
],
[
[
346,
363
],
[
2511,
2528
]
],
[
[
462,
495
],
[
1493,
1526
],
[
2040,
2073
]
],
[
[
581,
608
],
[
1446,
1473
]
],
[
[
680,
694
],
[
4403,
4417
]
],
[
[
777,
801
],
[
4990,
5014
],
[
5787,
5811
]
],
[
[
888,
918
],
[
5193,
5223
]
],
[
[
988,
1000
],
[
3431,
3443
],
[
3822,
3834
]
],
[
[
1009,
1015
]
]
] |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
__version__ = '1.18.1333'
# -----------------------------------------------------------------------------
import asyncio
import concurrent
import socket
import time
import math
import random
import certifi
import aiohttp
import ssl
import sys
import yarl
# -----------------------------------------------------------------------------
from ccxt.async_support.base.throttle import throttle
# -----------------------------------------------------------------------------
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import RequestTimeout
from ccxt.base.errors import NotSupported
# -----------------------------------------------------------------------------
from ccxt.base.exchange import Exchange as BaseExchange
# -----------------------------------------------------------------------------
__all__ = [
'BaseExchange',
'Exchange',
]
# -----------------------------------------------------------------------------
class Exchange(BaseExchange):
def __init__(self, config={}):
if 'asyncio_loop' in config:
self.asyncio_loop = config['asyncio_loop']
self.asyncio_loop = self.asyncio_loop or asyncio.get_event_loop()
self.aiohttp_trust_env = config.get('aiohttp_trust_env', self.aiohttp_trust_env)
self.verify = config.get('verify', self.verify)
self.own_session = 'session' not in config
self.cafile = config.get('cafile', certifi.where())
self.open()
super(Exchange, self).__init__(config)
self.init_rest_rate_limiter()
def init_rest_rate_limiter(self):
self.throttle = throttle(self.extend({
'loop': self.asyncio_loop,
}, self.tokenBucket))
def __del__(self):
if self.session is not None:
self.logger.warning(self.id + " requires to release all resources with an explicit call to the .close() coroutine. If you are using the exchange instance with async coroutines, add exchange.close() to your code into a place when you're done with the exchange and don't need the exchange instance anymore (at the end of your async coroutine).")
if sys.version_info >= (3, 5):
async def __aenter__(self):
self.open()
return self
async def __aexit__(self, exc_type, exc, tb):
await self.close()
def open(self):
if self.own_session and self.session is None:
# Create our SSL context object with our CA cert file
context = ssl.create_default_context(cafile=self.cafile) if self.verify else self.verify
# Pass this SSL context to aiohttp and create a TCPConnector
connector = aiohttp.TCPConnector(ssl=context, loop=self.asyncio_loop)
self.session = aiohttp.ClientSession(loop=self.asyncio_loop, connector=connector, trust_env=self.aiohttp_trust_env)
async def close(self):
if self.session is not None:
if self.own_session:
await self.session.close()
self.session = None
async def wait_for_token(self):
while self.rateLimitTokens <= 1:
# if self.verbose:
# print('Waiting for tokens: Exchange: {0}'.format(self.id))
self.add_new_tokens()
seconds_delays = [0.001, 0.005, 0.022, 0.106, 0.5]
delay = random.choice(seconds_delays)
await asyncio.sleep(delay)
self.rateLimitTokens -= 1
def add_new_tokens(self):
# if self.verbose:
# print('Adding new tokens: Exchange: {0}'.format(self.id))
now = time.monotonic()
time_since_update = now - self.rateLimitUpdateTime
new_tokens = math.floor((0.8 * 1000.0 * time_since_update) / self.rateLimit)
if new_tokens > 1:
self.rateLimitTokens = min(self.rateLimitTokens + new_tokens, self.rateLimitMaxTokens)
self.rateLimitUpdateTime = now
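# --- Illustrative note (not part of the original module) ---
# A hedged worked example of the token top-up above, assuming rateLimit is 2000 ms
# (the real value is exchange-specific) and 10 seconds since the last update:
#   new_tokens = floor((0.8 * 1000.0 * 10) / 2000) = floor(4.0) = 4
# Since new_tokens > 1, rateLimitTokens grows by 4 (capped at rateLimitMaxTokens)
# and rateLimitUpdateTime is advanced; otherwise the branch above leaves both untouched.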
async def fetch2(self, path, api='public', method='GET', params={}, headers=None, body=None):
"""A better wrapper over request for deferred signing"""
if self.enableRateLimit:
await self.throttle()
self.lastRestRequestTimestamp = self.milliseconds()
request = self.sign(path, api, method, params, headers, body)
return await self.fetch(request['url'], request['method'], request['headers'], request['body'])
async def fetch(self, url, method='GET', headers=None, body=None):
"""Perform a HTTP request and return decoded JSON data"""
request_headers = self.prepare_request_headers(headers)
url = self.proxy + url
if self.verbose:
print("\nRequest:", method, url, headers, body)
self.logger.debug("%s %s, Request: %s %s", method, url, headers, body)
request_body = body
encoded_body = body.encode() if body else None
session_method = getattr(self.session, method.lower())
http_response = None
http_status_code = None
http_status_text = None
json_response = None
try:
async with session_method(yarl.URL(url, encoded=True),
data=encoded_body,
headers=request_headers,
timeout=(self.timeout / 1000),
proxy=self.aiohttp_proxy) as response:
http_response = await response.text()
http_status_code = response.status
http_status_text = response.reason
json_response = self.parse_json(http_response)
headers = response.headers
if self.enableLastHttpResponse:
self.last_http_response = http_response
if self.enableLastResponseHeaders:
self.last_response_headers = headers
if self.enableLastJsonResponse:
self.last_json_response = json_response
if self.verbose:
print("\nResponse:", method, url, http_status_code, headers, http_response)
self.logger.debug("%s %s, Response: %s %s %s", method, url, http_status_code, headers, http_response)
except socket.gaierror as e:
raise ExchangeNotAvailable(method + ' ' + url)
except concurrent.futures._base.TimeoutError as e:
raise RequestTimeout(method + ' ' + url)
except aiohttp.client_exceptions.ClientConnectionError as e:
raise ExchangeNotAvailable(method + ' ' + url)
except aiohttp.client_exceptions.ClientError as e: # base exception class
raise ExchangeError(method + ' ' + url)
self.handle_errors(http_status_code, http_status_text, url, method, headers, http_response, json_response, request_headers, request_body)
self.handle_rest_errors(http_status_code, http_status_text, http_response, url, method)
self.handle_rest_response(http_response, json_response, url, method)
if json_response is not None:
return json_response
return http_response
async def load_markets(self, reload=False, params={}):
if not reload:
if self.markets:
if not self.markets_by_id:
return self.set_markets(self.markets)
return self.markets
currencies = None
if self.has['fetchCurrencies']:
currencies = await self.fetch_currencies()
markets = await self.fetch_markets(params)
return self.set_markets(markets, currencies)
async def fetch_fees(self):
trading = {}
funding = {}
if self.has['fetchTradingFees']:
trading = await self.fetch_trading_fees()
if self.has['fetchFundingFees']:
funding = await self.fetch_funding_fees()
return {
'trading': trading,
'funding': funding,
}
async def load_fees(self, reload=False):
if not reload:
if self.loaded_fees != Exchange.loaded_fees:
return self.loaded_fees
self.loaded_fees = self.deep_extend(self.loaded_fees, await self.fetch_fees())
return self.loaded_fees
async def fetch_markets(self, params={}):
# markets are returned as a list
# currencies are returned as a dict
# this is for historical reasons
# and may be changed for consistency later
return self.to_array(self.markets)
async def fetch_currencies(self, params={}):
# markets are returned as a list
# currencies are returned as a dict
# this is for historical reasons
# and may be changed for consistency later
return self.currencies
async def fetch_status(self, params={}):
if self.has['fetchTime']:
updated = await self.fetch_time(params)
self.status['updated'] = updated
return self.status
async def fetch_order_status(self, id, symbol=None, params={}):
order = await self.fetch_order(id, symbol, params)
return order['status']
async def fetch_partial_balance(self, part, params={}):
balance = await self.fetch_balance(params)
return balance[part]
async def fetch_l2_order_book(self, symbol, limit=None, params={}):
orderbook = await self.fetch_order_book(symbol, limit, params)
return self.extend(orderbook, {
'bids': self.sort_by(self.aggregate(orderbook['bids']), 0, True),
'asks': self.sort_by(self.aggregate(orderbook['asks']), 0),
})
async def perform_order_book_request(self, market, limit=None, params={}):
raise NotSupported(self.id + ' performOrderBookRequest not supported yet')
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
orderbook = await self.perform_order_book_request(market, limit, params)
return self.parse_order_book(orderbook, market, limit, params)
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
if not self.has['fetchTrades']:
raise NotSupported('fetch_ohlcv() not implemented yet')
await self.load_markets()
trades = await self.fetch_trades(symbol, since, limit, params)
return self.build_ohlcv(trades, timeframe, since, limit)
async def fetchOHLCV(self, symbol, timeframe='1m', since=None, limit=None, params={}):
return await self.fetch_ohlcv(symbol, timeframe, since, limit, params)
async def fetch_full_tickers(self, symbols=None, params={}):
return await self.fetch_tickers(symbols, params)
async def edit_order(self, id, symbol, *args):
if not self.enableRateLimit:
raise ExchangeError('updateOrder() requires enableRateLimit = true')
await self.cancel_order(id, symbol)
return await self.create_order(symbol, *args)
async def fetch_trading_fees(self, params={}):
raise NotSupported('fetch_trading_fees() not supported yet')
async def fetch_trading_fee(self, symbol, params={}):
if not self.has['fetchTradingFees']:
raise NotSupported('fetch_trading_fee() not supported yet')
return await self.fetch_trading_fees(params)
async def load_trading_limits(self, symbols=None, reload=False, params={}):
if self.has['fetchTradingLimits']:
if reload or not('limitsLoaded' in list(self.options.keys())):
response = await self.fetch_trading_limits(symbols)
for i in range(0, len(symbols)):
symbol = symbols[i]
self.markets[symbol] = self.deep_extend(self.markets[symbol], response[symbol])
self.options['limitsLoaded'] = self.milliseconds()
return self.markets
async def load_accounts(self, reload=False, params={}):
if reload:
self.accounts = await self.fetch_accounts(params)
else:
if self.accounts:
return self.accounts
else:
self.accounts = await self.fetch_accounts(params)
self.accountsById = self.index_by(self.accounts, 'id')
return self.accounts
async def fetch_ticker(self, symbol, params={}):
raise NotSupported('fetch_ticker() not supported yet')
| [
[
[
106,
117
]
],
[
[
221,
228
],
[
1320,
1327
],
[
3533,
3540
]
],
[
[
236,
246
],
[
6487,
6497
]
],
[
[
254,
260
],
[
6390,
6396
]
],
[
[
268,
272
],
[
3732,
3736
]
],
[
[
280,
284
],
[
3829,
3833
]
],
[
[
292,
298
],
[
3485,
3491
]
],
[
[
306,
313
],
[
1584,
1591
]
],
[
[
321,
328
],
[
2823,
2830
],
[
2908,
2915
],
[
6600,
6607
],
[
6729,
6736
]
],
[
[
336,
339
],
[
2647,
2650
]
],
[
[
347,
350
],
[
2286,
2289
]
],
[
[
358,
362
],
[
5246,
5250
]
],
[
[
490,
498
],
[
1769,
1777
]
],
[
[
610,
623
],
[
6815,
6828
],
[
10986,
10999
]
],
[
[
653,
673
],
[
6430,
6450
],
[
6672,
6692
]
],
[
[
703,
717
],
[
6549,
6563
]
],
[
[
747,
759
],
[
9852,
9864
],
[
10365,
10377
],
[
11213,
11225
],
[
11390,
11402
],
[
12515,
12527
]
],
[
[
873,
897
],
[
1128,
1140
]
],
[
[
980,
987
]
],
[
[
1119,
1127
],
[
1635,
1643
],
[
8203,
8211
]
]
] |
import csv
def ClassFactory(class_name, dictionary):
return type(class_name, (object,), dictionary)
class CsvReader:
def __init__(self, filepath):
self.data = []
with open(filepath) as csv_files:
csv_data = csv.DictReader(csv_files, delimiter=',')
for row in csv_data:
self.data.append(row)
pass
def return_data_object(self, class_name):
objects = []
for row in self.data:
objects.append(ClassFactory(class_name, row))
return objects
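# --- Illustrative usage sketch (not part of the original module) ---
# Assuming a CSV file "people.csv" with a header row "name,age" and one data row
# "Ann,34" (file name and values are made up), roughly:
#
# >>> rows = CsvReader("people.csv").return_data_object("Person")
# >>> rows[0].name
# 'Ann'
#
# Note that ClassFactory returns a new class per row (the row's values become class
# attributes), not an instance of a shared class.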
| [
[
[
7,
10
],
[
248,
251
]
],
[
[
17,
29
],
[
498,
510
]
],
[
[
114,
123
]
]
] |
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import unittest
import mock
from dashboard.pinpoint.models.quest import read_value
from tracing.value import histogram_set
from tracing.value import histogram as histogram_module
from tracing.value.diagnostics import generic_set
from tracing.value.diagnostics import reserved_infos
_BASE_ARGUMENTS_HISTOGRAMS = {'benchmark': 'speedometer'}
_BASE_ARGUMENTS_GRAPH_JSON = {
'chart': 'chart_name',
'trace': 'trace_name',
}
class ReadHistogramsJsonValueQuestTest(unittest.TestCase):
def testMinimumArguments(self):
quest = read_value.ReadHistogramsJsonValue.FromDict(
_BASE_ARGUMENTS_HISTOGRAMS)
expected = read_value.ReadHistogramsJsonValue('chartjson-output.json')
self.assertEqual(quest, expected)
def testAllArguments(self):
arguments = dict(_BASE_ARGUMENTS_HISTOGRAMS)
arguments['chart'] = 'timeToFirst'
arguments['tir_label'] = 'pcv1-cold'
arguments['trace'] = 'trace_name'
arguments['statistic'] = 'avg'
quest = read_value.ReadHistogramsJsonValue.FromDict(arguments)
expected = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', 'timeToFirst',
'pcv1-cold', 'trace_name', 'avg')
self.assertEqual(quest, expected)
def testPerformanceTestSuite(self):
arguments = dict(_BASE_ARGUMENTS_HISTOGRAMS)
arguments['target'] = 'performance_test_suite'
quest = read_value.ReadHistogramsJsonValue.FromDict(arguments)
expected = read_value.ReadHistogramsJsonValue(
'speedometer/perf_results.json')
self.assertEqual(quest, expected)
def testPerformanceTestSuiteWindows(self):
arguments = dict(_BASE_ARGUMENTS_HISTOGRAMS)
arguments['dimensions'] = [{'key': 'os', 'value': 'Windows-10'}]
arguments['target'] = 'performance_test_suite'
quest = read_value.ReadHistogramsJsonValue.FromDict(arguments)
expected = read_value.ReadHistogramsJsonValue(
'speedometer\\perf_results.json')
self.assertEqual(quest, expected)
class ReadGraphJsonValueQuestTest(unittest.TestCase):
def testAllArguments(self):
quest = read_value.ReadGraphJsonValue.FromDict(_BASE_ARGUMENTS_GRAPH_JSON)
expected = read_value.ReadGraphJsonValue('chart_name', 'trace_name')
self.assertEqual(quest, expected)
def testMissingChart(self):
arguments = dict(_BASE_ARGUMENTS_GRAPH_JSON)
del arguments['chart']
with self.assertRaises(TypeError):
read_value.ReadGraphJsonValue.FromDict(arguments)
def testMissingTrace(self):
arguments = dict(_BASE_ARGUMENTS_GRAPH_JSON)
del arguments['trace']
with self.assertRaises(TypeError):
read_value.ReadGraphJsonValue.FromDict(arguments)
class _ReadValueExecutionTest(unittest.TestCase):
def setUp(self):
patcher = mock.patch('dashboard.services.isolate.Retrieve')
self._retrieve = patcher.start()
self.addCleanup(patcher.stop)
def SetOutputFileContents(self, contents):
self._retrieve.side_effect = (
'{"files": {"chartjson-output.json": {"h": "output json hash"}}}',
json.dumps(contents),
)
def assertReadValueError(self, execution):
self.assertTrue(execution.completed)
self.assertTrue(execution.failed)
self.assertIsInstance(execution.exception, basestring)
last_exception_line = execution.exception.splitlines()[-1]
self.assertTrue(last_exception_line.startswith('ReadValueError'))
def assertReadValueSuccess(self, execution):
self.assertTrue(execution.completed)
self.assertFalse(execution.failed)
self.assertEqual(execution.result_arguments, {})
def assertRetrievedOutputJson(self):
expected_calls = [
mock.call('server', 'output hash'),
mock.call('server', 'output json hash'),
]
self.assertEqual(self._retrieve.mock_calls, expected_calls)
class ReadHistogramsJsonValueTest(_ReadValueExecutionTest):
def testReadHistogramsJsonValue(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist.name, 'tir_label', 'story')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0, 1, 2))
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueStatistic(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist.name,
'tir_label', 'story', statistic='avg')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (1,))
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueStatisticNoSamples(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist.name,
'tir_label', 'story', statistic='avg')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueMultipleHistograms(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
hist2 = histogram_module.Histogram('hist', 'count')
hist2.AddSample(0)
hist2.AddSample(1)
hist2.AddSample(2)
hist3 = histogram_module.Histogram('some_other_histogram', 'count')
hist3.AddSample(3)
hist3.AddSample(4)
hist3.AddSample(5)
histograms = histogram_set.HistogramSet([hist, hist2, hist3])
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist.name, 'tir_label', 'story')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0, 1, 2, 0, 1, 2))
self.assertRetrievedOutputJson()
def testReadHistogramsTraceUrls(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.diagnostics[reserved_infos.TRACE_URLS.name] = (
generic_set.GenericSet(['trace_url1', 'trace_url2']))
hist2 = histogram_module.Histogram('hist2', 'count')
hist2.diagnostics[reserved_infos.TRACE_URLS.name] = (
generic_set.GenericSet(['trace_url3']))
hist3 = histogram_module.Histogram('hist3', 'count')
hist3.diagnostics[reserved_infos.TRACE_URLS.name] = (
generic_set.GenericSet(['trace_url2']))
histograms = histogram_set.HistogramSet([hist, hist2, hist3])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name=hist.name)
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0,))
self.assertEqual(
{
'completed': True,
'exception': None,
'details': [
{
'key': 'trace',
'value': 'trace_url1',
'url': 'trace_url1',
},
{
'key': 'trace',
'value': 'trace_url2',
'url': 'trace_url2',
},
{
'key': 'trace',
'value': 'trace_url3',
'url': 'trace_url3',
},
],
},
execution.AsDict())
self.assertRetrievedOutputJson()
def testReadHistogramsDiagnosticRefSkipTraceUrls(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.diagnostics[reserved_infos.TRACE_URLS.name] = (
generic_set.GenericSet(['trace_url1', 'trace_url2']))
hist2 = histogram_module.Histogram('hist2', 'count')
hist2.diagnostics[reserved_infos.TRACE_URLS.name] = (
generic_set.GenericSet(['trace_url3']))
hist2.diagnostics[reserved_infos.TRACE_URLS.name].guid = 'foo'
histograms = histogram_set.HistogramSet([hist, hist2])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name=hist.name)
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0,))
self.assertEqual(
{
'completed': True,
'exception': None,
'details': [
{
'key': 'trace',
'value': 'trace_url1',
'url': 'trace_url1',
},
{
'key': 'trace',
'value': 'trace_url2',
'url': 'trace_url2',
},
],
},
execution.AsDict())
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueWithNoTirLabel(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name=hist.name, tir_label='tir_label')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0, 1, 2))
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueWithNoStory(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name=hist.name, story='story')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0, 1, 2))
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueSummary(self):
samples = []
hists = []
for i in xrange(10):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
hist.diagnostics[reserved_infos.STORIES.name] = (
generic_set.GenericSet(['story%d' % i]))
hists.append(hist)
samples.extend(hist.sample_values)
histograms = histogram_set.HistogramSet(hists)
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name=hists[0].name, tir_label='tir_label')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, tuple(samples))
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueWithMissingFile(self):
self._retrieve.return_value = '{"files": {}}'
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='metric', tir_label='test')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueEmptyHistogramSet(self):
self.SetOutputFileContents([])
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='metric', tir_label='test')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueWithMissingHistogram(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='does_not_exist')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueWithNoValues(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='chart')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueTirLabelWithNoValues(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='chart', tir_label='tir_label')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueStoryWithNoValues(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='chart', story='story')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
class ReadGraphJsonValueTest(_ReadValueExecutionTest):
def testReadGraphJsonValue(self):
self.SetOutputFileContents(
{'chart': {'traces': {'trace': ['126444.869721', '0.0']}}})
quest = read_value.ReadGraphJsonValue('chart', 'trace')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (126444.869721,))
self.assertRetrievedOutputJson()
def testReadGraphJsonValueWithMissingFile(self):
self._retrieve.return_value = '{"files": {}}'
quest = read_value.ReadGraphJsonValue('metric', 'test')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadGraphJsonValueWithMissingChart(self):
self.SetOutputFileContents({})
quest = read_value.ReadGraphJsonValue('metric', 'test')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadGraphJsonValueWithMissingTrace(self):
self.SetOutputFileContents({'chart': {'traces': {}}})
quest = read_value.ReadGraphJsonValue('metric', 'test')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
| [
[
[
170,
174
],
[
3192,
3196
]
],
[
[
182,
190
],
[
648,
656
],
[
2176,
2184
],
[
2853,
2861
]
],
[
[
199,
203
],
[
2907,
2911
],
[
3789,
3793
],
[
3833,
3837
]
],
[
[
249,
259
],
[
715,
725
],
[
811,
821
],
[
1154,
1164
],
[
1225,
1235
],
[
1540,
1550
],
[
1611,
1621
],
[
1953,
1963
],
[
2024,
2034
],
[
2239,
2249
],
[
2321,
2331
],
[
2569,
2579
],
[
2771,
2781
],
[
4532,
4542
],
[
5387,
5397
],
[
6205,
6215
],
[
7278,
7288
],
[
8303,
8313
],
[
9860,
9870
],
[
11075,
11085
],
[
11815,
11825
],
[
12794,
12804
],
[
13254,
13264
],
[
13590,
13600
],
[
14054,
14064
],
[
14500,
14510
],
[
14945,
14955
],
[
15410,
15420
],
[
15842,
15852
],
[
16230,
16240
],
[
16501,
16511
],
[
16795,
16805
]
],
[
[
286,
299
],
[
4186,
4199
],
[
5041,
5054
],
[
5859,
5872
],
[
6918,
6931
],
[
8188,
8201
],
[
9752,
9765
],
[
10844,
10857
],
[
11597,
11610
],
[
12564,
12577
],
[
13953,
13966
],
[
14399,
14412
],
[
14844,
14857
],
[
15309,
15322
]
],
[
[
326,
355
],
[
4059,
4075
],
[
4914,
4930
],
[
5798,
5814
],
[
6525,
6541
],
[
6647,
6663
],
[
6772,
6788
],
[
7660,
7676
],
[
7857,
7873
],
[
8020,
8036
],
[
9320,
9336
],
[
9517,
9533
],
[
10717,
10733
],
[
11470,
11486
],
[
12257,
12273
],
[
13892,
13908
],
[
14338,
14354
],
[
14783,
14799
],
[
15248,
15264
]
],
[
[
394,
405
],
[
4305,
4316
],
[
4431,
4442
],
[
5160,
5171
],
[
5286,
5297
],
[
5978,
5989
],
[
6104,
6115
],
[
7051,
7062
],
[
7177,
7188
],
[
7791,
7802
],
[
7968,
7979
],
[
8131,
8142
],
[
9451,
9462
],
[
9628,
9639
],
[
10963,
10974
],
[
11713,
11724
],
[
12439,
12450
],
[
12682,
12693
]
],
[
[
444,
458
],
[
4265,
4279
],
[
4394,
4408
],
[
5120,
5134
],
[
5249,
5263
],
[
5938,
5952
],
[
6067,
6081
],
[
7011,
7025
],
[
7140,
7154
],
[
7747,
7761
],
[
7924,
7938
],
[
8087,
8101
],
[
9407,
9421
],
[
9584,
9598
],
[
9690,
9704
],
[
10923,
10937
],
[
11676,
11690
],
[
12396,
12410
],
[
12642,
12656
]
],
[
[
462,
488
],
[
768,
794
],
[
961,
987
],
[
1449,
1475
],
[
1793,
1819
]
],
[
[
520,
546
],
[
2278,
2304
],
[
2469,
2495
],
[
2671,
2697
]
],
[
[
615,
647
]
],
[
[
2148,
2175
]
],
[
[
2829,
2852
],
[
3980,
4003
],
[
15666,
15689
]
],
[
[
3952,
3979
]
],
[
[
15643,
15665
]
]
] |
"""Tests for fan platforms."""
import pytest
from homeassistant.components.fan import FanEntity
class BaseFan(FanEntity):
"""Implementation of the abstract FanEntity."""
def __init__(self):
"""Initialize the fan."""
def test_fanentity():
"""Test fan entity methods."""
fan = BaseFan()
assert fan.state == "off"
assert fan.preset_modes is None
assert fan.supported_features == 0
assert fan.percentage_step == 1
assert fan.speed_count == 100
assert fan.capability_attributes == {}
# Test set_speed not required
with pytest.raises(NotImplementedError):
fan.oscillate(True)
with pytest.raises(AttributeError):
fan.set_speed("low")
with pytest.raises(NotImplementedError):
fan.set_percentage(0)
with pytest.raises(NotImplementedError):
fan.set_preset_mode("auto")
with pytest.raises(NotImplementedError):
fan.turn_on()
with pytest.raises(NotImplementedError):
fan.turn_off()
async def test_async_fanentity(hass):
"""Test async fan entity methods."""
fan = BaseFan()
fan.hass = hass
assert fan.state == "off"
assert fan.preset_modes is None
assert fan.supported_features == 0
assert fan.percentage_step == 1
assert fan.speed_count == 100
assert fan.capability_attributes == {}
# Test set_speed not required
with pytest.raises(NotImplementedError):
await fan.async_oscillate(True)
with pytest.raises(AttributeError):
await fan.async_set_speed("low")
with pytest.raises(NotImplementedError):
await fan.async_set_percentage(0)
with pytest.raises(NotImplementedError):
await fan.async_set_preset_mode("auto")
with pytest.raises(NotImplementedError):
await fan.async_turn_on()
with pytest.raises(NotImplementedError):
await fan.async_turn_off()
with pytest.raises(NotImplementedError):
await fan.async_increase_speed()
with pytest.raises(NotImplementedError):
await fan.async_decrease_speed()
@pytest.mark.parametrize(
"attribute_name, attribute_value",
[
("current_direction", "forward"),
("oscillating", True),
("percentage", 50),
("preset_mode", "medium"),
("preset_modes", ["low", "medium", "high"]),
("speed_count", 50),
("supported_features", 1),
],
)
def test_fanentity_attributes(attribute_name, attribute_value):
"""Test fan entity attribute shorthand."""
fan = BaseFan()
setattr(fan, f"_attr_{attribute_name}", attribute_value)
assert getattr(fan, attribute_name) == attribute_value
| [
[
[
39,
45
],
[
2054,
2060
],
[
577,
583
],
[
650,
656
],
[
719,
725
],
[
794,
800
],
[
875,
881
],
[
942,
948
],
[
1383,
1389
],
[
1468,
1474
],
[
1549,
1555
],
[
1636,
1642
],
[
1729,
1735
],
[
1808,
1814
],
[
1888,
1894
],
[
1974,
1980
]
],
[
[
88,
97
],
[
114,
123
]
],
[
[
106,
113
],
[
306,
313
],
[
1092,
1099
],
[
2507,
2514
]
],
[
[
243,
257
]
],
[
[
1003,
2050
]
],
[
[
2390,
2415
]
]
] |
from ynab_api import __version__
def test_version():
assert __version__ == '0.1.0'
| [
[
[
21,
32
],
[
66,
77
]
],
[
[
39,
51
]
]
] |
from vkbottle_types import GroupTypes
from vkbottle_types.events import GroupEventType, UserEventType
from .api import (
ABCAPI,
API,
DEFAULT_REQUEST_VALIDATORS,
DEFAULT_RESPONSE_VALIDATORS,
ABCRequestRescheduler,
ABCRequestValidator,
ABCResponseValidator,
ABCTokenGenerator,
BlockingRequestRescheduler,
ConsistentTokenGenerator,
SingleTokenGenerator,
Token,
get_token_generator,
)
from .dispatch import (
ABCDispenseView,
ABCHandler,
ABCRouter,
ABCRule,
ABCStateDispenser,
ABCView,
AndRule,
BaseMiddleware,
BaseReturnManager,
BaseStateGroup,
BuiltinStateDispenser,
MiddlewareError,
NotRule,
OrRule,
Router,
StatePeer,
)
from .exception_factory import (
ABCErrorHandler,
CaptchaError,
CodeException,
ErrorHandler,
VKAPIError,
swear,
)
from .framework import (
ABCBlueprint,
ABCFramework,
Bot,
BotBlueprint,
User,
UserBlueprint,
run_multibot,
)
from .http import ABCHTTPClient, AiohttpClient, SingleAiohttpClient
from .polling import ABCPolling, BotPolling, UserPolling
from .tools import (
EMPTY_KEYBOARD,
ABCAction,
ABCStorage,
ABCValidator,
AudioUploader,
AuthError,
BaseContext,
BaseUploader,
BotTypes,
CallableValidator,
Callback,
CtxStorage,
DelayedTask,
DocMessagesUploader,
DocUploader,
DocWallUploader,
EqualsValidator,
GraffitiUploader,
IsInstanceValidator,
Keyboard,
KeyboardButtonColor,
Location,
LoopWrapper,
OpenAppEvent,
OpenLink,
OpenLinkEvent,
PhotoChatFaviconUploader,
PhotoFaviconUploader,
PhotoMarketUploader,
PhotoMessageUploader,
PhotoToAlbumUploader,
PhotoUploader,
PhotoWallUploader,
ShowSnackbarEvent,
TemplateElement,
Text,
UserAuth,
UserTypes,
VideoUploader,
VKApps,
VKPay,
VoiceMessageUploader,
keyboard_gen,
load_blueprints_from_package,
run_in_task,
run_sync,
template_gen,
vkscript,
)
event_types = GroupTypes
__all__ = (
"ABCAction",
"ABCAPI",
"ABCBlueprint",
"ABCDispenseView",
"ABCErrorHandler",
"ABCFramework",
"ABCHandler",
"ABCHTTPClient",
"ABCPolling",
"ABCRequestRescheduler",
"ABCRequestValidator",
"ABCResponseValidator",
"ABCRouter",
"ABCRule",
"ABCStateDispenser",
"ABCStorage",
"ABCTokenGenerator",
"ABCValidator",
"ABCView",
"AiohttpClient",
"AndRule",
"API",
"AudioUploader",
"AuthError",
"BaseContext",
"BaseMiddleware",
"BaseReturnManager",
"BaseStateGroup",
"BaseUploader",
"BlockingRequestRescheduler",
"Bot",
"BotBlueprint",
"BotPolling",
"BotTypes",
"BuiltinStateDispenser",
"CallableValidator",
"Callback",
"CaptchaError",
"CodeException",
"ConsistentTokenGenerator",
"CtxStorage",
"DEFAULT_REQUEST_VALIDATORS",
"DEFAULT_RESPONSE_VALIDATORS",
"DelayedTask",
"DocMessagesUploader",
"DocUploader",
"DocWallUploader",
"EMPTY_KEYBOARD",
"EqualsValidator",
"ErrorHandler",
"get_token_generator",
"GraffitiUploader",
"GroupEventType",
"GroupTypes",
"IsInstanceValidator",
"keyboard_gen",
"Keyboard",
"KeyboardButtonColor",
"load_blueprints_from_package",
"Location",
"LoopWrapper",
"MiddlewareError",
"NotRule",
"OpenAppEvent",
"OpenLink",
"OpenLinkEvent",
"OrRule",
"PhotoChatFaviconUploader",
"PhotoFaviconUploader",
"PhotoMarketUploader",
"PhotoMessageUploader",
"PhotoToAlbumUploader",
"PhotoUploader",
"PhotoWallUploader",
"Router",
"run_in_task",
"run_multibot",
"run_sync",
"ShowSnackbarEvent",
"SingleAiohttpClient",
"SingleTokenGenerator",
"StatePeer",
"swear",
"template_gen",
"TemplateElement",
"Text",
"Token",
"User",
"UserAuth",
"UserBlueprint",
"UserEventType",
"UserPolling",
"UserTypes",
"VideoUploader",
"VKAPIError",
"VKApps",
"VKPay",
"vkscript",
"VoiceMessageUploader",
)
| [
[
[
27,
37
],
[
2091,
2101
]
],
[
[
72,
86
]
],
[
[
88,
101
]
],
[
[
126,
132
]
],
[
[
138,
141
]
],
[
[
147,
173
]
],
[
[
179,
206
]
],
[
[
212,
233
]
],
[
[
239,
258
]
],
[
[
264,
284
]
],
[
[
290,
307
]
],
[
[
313,
339
]
],
[
[
345,
369
]
],
[
[
375,
395
]
],
[
[
401,
406
]
],
[
[
412,
431
]
],
[
[
463,
478
]
],
[
[
484,
494
]
],
[
[
500,
509
]
],
[
[
515,
522
]
],
[
[
528,
545
]
],
[
[
551,
558
]
],
[
[
564,
571
]
],
[
[
577,
591
]
],
[
[
597,
614
]
],
[
[
620,
634
]
],
[
[
640,
661
]
],
[
[
667,
682
]
],
[
[
688,
695
]
],
[
[
701,
707
]
],
[
[
713,
719
]
],
[
[
725,
734
]
],
[
[
775,
790
]
],
[
[
796,
808
]
],
[
[
814,
827
]
],
[
[
833,
845
]
],
[
[
851,
861
]
],
[
[
867,
872
]
],
[
[
905,
917
]
],
[
[
923,
935
]
],
[
[
941,
944
]
],
[
[
950,
962
]
],
[
[
968,
972
]
],
[
[
978,
991
]
],
[
[
997,
1009
]
],
[
[
1031,
1044
]
],
[
[
1046,
1059
]
],
[
[
1061,
1080
]
],
[
[
1102,
1112
]
],
[
[
1114,
1124
]
],
[
[
1126,
1137
]
],
[
[
1163,
1177
]
],
[
[
1183,
1192
]
],
[
[
1198,
1208
]
],
[
[
1214,
1226
]
],
[
[
1232,
1245
]
],
[
[
1251,
1260
]
],
[
[
1266,
1277
]
],
[
[
1283,
1295
]
],
[
[
1301,
1309
]
],
[
[
1315,
1332
]
],
[
[
1338,
1346
]
],
[
[
1352,
1362
]
],
[
[
1368,
1379
]
],
[
[
1385,
1404
]
],
[
[
1410,
1421
]
],
[
[
1427,
1442
]
],
[
[
1448,
1463
]
],
[
[
1469,
1485
]
],
[
[
1491,
1510
]
],
[
[
1516,
1524
]
],
[
[
1530,
1549
]
],
[
[
1555,
1563
]
],
[
[
1569,
1580
]
],
[
[
1586,
1598
]
],
[
[
1604,
1612
]
],
[
[
1618,
1631
]
],
[
[
1637,
1661
]
],
[
[
1667,
1687
]
],
[
[
1693,
1712
]
],
[
[
1718,
1738
]
],
[
[
1744,
1764
]
],
[
[
1770,
1783
]
],
[
[
1789,
1806
]
],
[
[
1812,
1829
]
],
[
[
1835,
1850
]
],
[
[
1856,
1860
]
],
[
[
1866,
1874
]
],
[
[
1880,
1889
]
],
[
[
1895,
1908
]
],
[
[
1914,
1920
]
],
[
[
1926,
1931
]
],
[
[
1937,
1957
]
],
[
[
1963,
1975
]
],
[
[
1981,
2009
]
],
[
[
2015,
2026
]
],
[
[
2032,
2040
]
],
[
[
2046,
2058
]
],
[
[
2064,
2072
]
],
[
[
2077,
2088
]
],
[
[
2103,
2110
]
]
] |
import cv2 as cv
image = cv.imread() | [
[
[
7,
16
],
[
28,
30
]
],
[
[
20,
25
]
]
] |
# -*- encoding: utf-8
from sqlalchemy.testing import eq_, engines
from sqlalchemy import *
from sqlalchemy import exc
from sqlalchemy.dialects.mssql import pyodbc, pymssql
from sqlalchemy.engine import url
from sqlalchemy.testing import fixtures
from sqlalchemy import testing
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing.mock import Mock
class ParseConnectTest(fixtures.TestBase):
def test_pyodbc_connect_dsn_trusted(self):
dialect = pyodbc.dialect()
u = url.make_url('mssql://mydsn')
connection = dialect.create_connect_args(u)
eq_([['dsn=mydsn;Trusted_Connection=Yes'], {}], connection)
def test_pyodbc_connect_old_style_dsn_trusted(self):
dialect = pyodbc.dialect()
u = url.make_url('mssql:///?dsn=mydsn')
connection = dialect.create_connect_args(u)
eq_([['dsn=mydsn;Trusted_Connection=Yes'], {}], connection)
def test_pyodbc_connect_dsn_non_trusted(self):
dialect = pyodbc.dialect()
u = url.make_url('mssql://username:password@mydsn')
connection = dialect.create_connect_args(u)
eq_([['dsn=mydsn;UID=username;PWD=password'], {}], connection)
def test_pyodbc_connect_dsn_extra(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@mydsn/?LANGUAGE=us_'
'english&foo=bar')
connection = dialect.create_connect_args(u)
dsn_string = connection[0][0]
assert ";LANGUAGE=us_english" in dsn_string
assert ";foo=bar" in dsn_string
def test_pyodbc_connect(self):
dialect = pyodbc.dialect()
u = url.make_url('mssql://username:password@hostspec/database')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
'D=username;PWD=password'], {}], connection)
def test_pyodbc_connect_comma_port(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@hostspec:12345/data'
'base')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec,12345;Database=datab'
'ase;UID=username;PWD=password'], {}], connection)
def test_pyodbc_connect_config_port(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@hostspec/database?p'
'ort=12345')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
'D=username;PWD=password;port=12345'], {}], connection)
def test_pyodbc_extra_connect(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@hostspec/database?L'
'ANGUAGE=us_english&foo=bar')
connection = dialect.create_connect_args(u)
eq_(connection[1], {})
eq_(connection[0][0]
in ('DRIVER={SQL Server};Server=hostspec;Database=database;'
'UID=username;PWD=password;foo=bar;LANGUAGE=us_english',
'DRIVER={SQL Server};Server=hostspec;Database=database;UID='
'username;PWD=password;LANGUAGE=us_english;foo=bar'), True)
def test_pyodbc_odbc_connect(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql:///?odbc_connect=DRIVER%3D%7BSQL+Server'
'%7D%3BServer%3Dhostspec%3BDatabase%3Ddatabase'
'%3BUID%3Dusername%3BPWD%3Dpassword')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
'D=username;PWD=password'], {}], connection)
def test_pyodbc_odbc_connect_with_dsn(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql:///?odbc_connect=dsn%3Dmydsn%3BDatabase'
'%3Ddatabase%3BUID%3Dusername%3BPWD%3Dpassword'
)
connection = dialect.create_connect_args(u)
eq_([['dsn=mydsn;Database=database;UID=username;PWD=password'],
{}], connection)
def test_pyodbc_odbc_connect_ignores_other_values(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql://userdiff:passdiff@localhost/dbdiff?od'
'bc_connect=DRIVER%3D%7BSQL+Server%7D%3BServer'
'%3Dhostspec%3BDatabase%3Ddatabase%3BUID%3Duse'
'rname%3BPWD%3Dpassword')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
'D=username;PWD=password'], {}], connection)
def test_pymssql_port_setting(self):
dialect = pymssql.dialect()
u = \
url.make_url('mssql+pymssql://scott:tiger@somehost/test')
connection = dialect.create_connect_args(u)
eq_(
[[], {'host': 'somehost', 'password': 'tiger',
'user': 'scott', 'database': 'test'}], connection
)
u = \
url.make_url('mssql+pymssql://scott:tiger@somehost:5000/test')
connection = dialect.create_connect_args(u)
eq_(
[[], {'host': 'somehost:5000', 'password': 'tiger',
'user': 'scott', 'database': 'test'}], connection
)
def test_pymssql_disconnect(self):
dialect = pymssql.dialect()
for error in [
'Adaptive Server connection timed out',
'Net-Lib error during Connection reset by peer',
'message 20003',
'Error 10054',
'Not connected to any MS SQL server',
'Connection is closed'
]:
eq_(dialect.is_disconnect(error, None, None), True)
eq_(dialect.is_disconnect("not an error", None, None), False)
@testing.only_on(['mssql+pyodbc', 'mssql+pymssql'],
"FreeTDS specific test")
def test_bad_freetds_warning(self):
engine = engines.testing_engine()
def _bad_version(connection):
return 95, 10, 255
engine.dialect._get_server_version_info = _bad_version
assert_raises_message(exc.SAWarning,
'Unrecognized server version info',
engine.connect)
class VersionDetectionTest(fixtures.TestBase):
def test_pymssql_version(self):
dialect = pymssql.MSDialect_pymssql()
for vers in [
"Microsoft SQL Server Blah - 11.0.9216.62",
"Microsoft SQL Server (XYZ) - 11.0.9216.62 \n"
"Jul 18 2014 22:00:21 \nCopyright (c) Microsoft Corporation",
"Microsoft SQL Azure (RTM) - 11.0.9216.62 \n"
"Jul 18 2014 22:00:21 \nCopyright (c) Microsoft Corporation"
]:
conn = Mock(scalar=Mock(return_value=vers))
eq_(
dialect._get_server_version_info(conn),
(11, 0, 9216, 62)
)
| [
[
[
53,
56
],
[
601,
604
],
[
862,
865
],
[
1129,
1132
],
[
1789,
1792
],
[
2173,
2176
],
[
2569,
2572
],
[
2981,
2984
],
[
3012,
3015
],
[
3679,
3682
],
[
4133,
4136
],
[
4667,
4670
],
[
5012,
5015
],
[
5306,
5309
],
[
5864,
5867
],
[
5925,
5928
],
[
7023,
7026
]
],
[
[
58,
65
],
[
6154,
6161
]
],
[
[
89,
90
]
],
[
[
114,
117
],
[
6343,
6346
]
],
[
[
156,
162
],
[
482,
488
],
[
737,
743
],
[
992,
998
],
[
1256,
1262
],
[
1640,
1646
],
[
1976,
1982
],
[
2367,
2373
],
[
2762,
2768
],
[
3379,
3385
],
[
3869,
3875
],
[
4306,
4312
]
],
[
[
164,
171
],
[
4849,
4856
],
[
5513,
5520
],
[
6573,
6580
]
],
[
[
202,
205
],
[
511,
514
],
[
766,
769
],
[
1021,
1024
],
[
1299,
1302
],
[
1669,
1672
],
[
2019,
2022
],
[
2410,
2413
],
[
2805,
2808
],
[
3422,
3425
],
[
3912,
3915
],
[
4349,
4352
],
[
4894,
4897
],
[
5183,
5186
]
],
[
[
237,
245
],
[
396,
404
],
[
6499,
6507
]
],
[
[
269,
276
],
[
5993,
6000
]
],
[
[
308,
329
],
[
6321,
6342
]
],
[
[
366,
370
],
[
6974,
6978
],
[
6986,
6990
]
],
[
[
379,
395
]
],
[
[
6478,
6498
]
]
] |
# Import spacy
import spacy
# Instantiate the English model: nlp
nlp = spacy.load('en', tagger=False, parser=False, matcher=False)

# Create a new document: doc
doc = nlp(article)
# Print all of the found entities and their labels
for ent in doc.ents:
print(ent.label_, ent.text)
""" <script.py> output:
ORG Uber
ORG Uber
ORG Apple
ORG Uber
ORG Uber
PERSON Travis Kalanick
ORG Uber
PERSON Tim Cook
ORG Apple
CARDINAL Millions
ORG Uber
GPE drivers’
LOC Silicon Valley’s
ORG Yahoo
PERSON Marissa Mayer
MONEY $186m """ | [
[
[
22,
27
],
[
72,
77
]
],
[
[
66,
69
],
[
171,
174
]
],
[
[
165,
168
],
[
247,
250
]
],
[
[
240,
243
],
[
267,
270
],
[
279,
282
]
]
] |
import glob
import cv2
import os
def extract_frame(movie_files_dir, out_dir):
movie_files = glob.glob(movie_files_dir)
if not movie_files:
print('movie files are not found.')
return
for movie_file in movie_files:
ext = movie_file.split('.')[-1]
if ext not in ('mp4', 'MP4'):  # the original 'or' condition was always true, so every file was skipped
print(f"can't extract this movie file: {movie_file}")
continue
if not os.path.exists(out_dir):
os.mkdir(out_dir)
cap = cv2.VideoCapture(movie_file)
if not cap.isOpened():
print(f"can't extract this movie file: {movie_file}")
return
digit = len(str(int(cap.get(cv2.CAP_PROP_FRAME_COUNT))))
n = 0
while True:
ret, frame = cap.read()
if ret:
cv2.imwrite(f"{movie_file}_{str(n).zfill(digit)}.jpg", frame)
n += 1
continue
break  # finished reading this movie; move on to the next file
print(f'{len(movie_files)} movie files extracted')
if __name__ == '__main__':
movie_files_dir = 'movies/*.mp4'
out_dir = 'out/'
extract_frame(movie_files_dir, out_dir)
| [
[
[
7,
11
],
[
98,
102
]
],
[
[
19,
22
],
[
533,
536
],
[
715,
718
],
[
850,
853
]
],
[
[
30,
32
],
[
463,
465
],
[
500,
502
]
],
[
[
39,
52
],
[
1126,
1139
]
],
[
[
1068,
1083
],
[
1140,
1155
]
],
[
[
1105,
1112
],
[
1157,
1164
]
]
] |
#!/usr/bin/env python
from ping360_sonar.sensor import Ping360
from numpy import pi, sqrt, tan, cos, sign
from brping import definitions
class SonarInterface:
samplePeriodTickDuration = 25e-9
firmwareMinTransmitDuration = 5
firmwareMaxTransmitDuration = 500
firmwareMaxSamples = 1200
firmwareMinSamplePeriod = 80
maxDurationRatio = 64e6
def __init__(self, port, baudrate, fallback_emulated):
self.angle = 0
try:
self.sonar = Ping360(port, baudrate)
if self.sonar.initialize():
return
except:
pass
if not fallback_emulated:
raise RuntimeError('Cannot initialize sonar')
print('Using emulated sonar')
self.sonar = None
def configureAngles(self, aperture_deg, step_deg, ensure_divisor):
# to gradians
target_half_aperture = int(aperture_deg*200/360+0.5)
best_half_aperture = target_half_aperture
self.angle_step = int(round(step_deg*400/360))
# ensure angle_step is a divisor of max-min in gradians, necessary for LaserScan messages
if ensure_divisor:
# look around step, allow increased aperture
target_step = self.angle_step
# not too far from requested aperture, as close as possible to requested step (impacts turn duration)
computeCost = lambda step,half_aperture: 1000 if half_aperture%step != 0 else abs(step-target_step) + abs(half_aperture-target_half_aperture)
best_cost = computeCost(self.angle_step, target_half_aperture)
if best_cost != 0:
for step in range(1, target_step*2):
for half_aperture in range(target_half_aperture, min(target_half_aperture+10, 200)+1):
cost = computeCost(step, half_aperture)
if cost < best_cost:
best_cost = cost
self.angle_step = step
best_half_aperture = half_aperture
self.angle_min = -best_half_aperture
self.angle_max = best_half_aperture
if self.angle_max == 200:
self.angle_max -= self.angle_step
if self.angle < self.angle_min or self.angle > self.angle_max or (self.angle-self.angle_min) % self.angle_step != 0:
self.angle = 0
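# --- Illustrative note (not part of the original file) ---
# A hedged worked example of the gradian conversion above, using made-up inputs:
# configureAngles(aperture_deg=90, step_deg=1.8, ensure_divisor=True) gives
#   target_half_aperture = int(90*200/360 + 0.5) = 50
#   angle_step = int(round(1.8*400/360)) = 2
# and since 50 % 2 == 0 the divisor search is skipped, leaving
#   angle_min = -50, angle_max = 50 (gradians).
# When the step does not divide the half-aperture, the nested loops instead pick the
# nearby (step, half_aperture) pair with the lowest cost.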
@staticmethod
def grad2rad(grad):
return grad*pi/200
def angleMin(self):
return self.grad2rad(self.angle_min)
def angleMax(self):
return self.grad2rad(self.angle_max)
def angleStep(self):
return self.grad2rad(self.angle_step)
def currentAngle(self):
return self.grad2rad(self.angle)
def angleCount(self):
return (self.angle_max-self.angle_min)//self.angle_step
def angleIndex(self):
if self.angle_step > 0:
return (self.angle-self.angle_min)//self.angle_step
return (self.angle-self.angle_max)//self.angle_step
def rangeFrom(self, index):
return (index+1)*self.max_range/self.samples
    def configureTransducer(self, gain, frequency, speed_of_sound, max_range):
        self.gain = gain
        self.frequency = frequency
        self.max_range = max_range  # stored for rangeFrom()
        self.samples = int(min(self.firmwareMaxSamples, 2*max_range/(self.firmwareMinSamplePeriod*speed_of_sound*self.samplePeriodTickDuration)))
        self.sample_period = int((2.*max_range)/
                                 (self.samples*speed_of_sound*self.samplePeriodTickDuration))

        #* Per firmware engineer:
        #* 1. Starting point is TxPulse in usec = ((one-way range in metres) * 8000) / (Velocity of sound in metres
        #* per second)
        #* 2. Then check that TxPulse is wide enough for currently selected sample interval in usec, i.e.,
        #* if TxPulse < (2.5 * sample interval) then TxPulse = (2.5 * sample interval)
        #* 3. Perform limit checking

        # 1
        one_way_duration_us = (8000.*max_range)/speed_of_sound
        # 2 (transmit duration is microseconds, sample_period_ns is nanoseconds)
        sample_period_ns = self.sample_period * self.samplePeriodTickDuration
        self.transmit_duration = max(2.5*sample_period_ns/1000, one_way_duration_us)
        # 3 ensure bounds
        if self.transmit_duration < self.firmwareMinTransmitDuration:
            self.transmit_duration = self.firmwareMinTransmitDuration
        else:
            max_duration = min(self.firmwareMaxTransmitDuration, sample_period_ns*self.maxDurationRatio)
            if self.transmit_duration > max_duration:
                self.transmit_duration = max_duration
        self.transmit_duration = int(self.transmit_duration)

    def transmitDuration(self):
        # microseconds to seconds
        return self.transmit_duration/1e6
    def updateAngle(self):
        self.angle += self.angle_step

        if self.angle_min == -200:
            # full scan
            end_turn = self.angle + self.angle_step > self.angle_max
            if self.angle > self.angle_max:
                self.angle = self.angle_min
            return end_turn

        # sector scan, check near end of sector
        if self.angle + self.angle_step >= self.angle_max or self.angle + self.angle_step <= self.angle_min:
            self.angle_step *= -1
            return True
        return False

    def read(self):
        # update angle before transmit
        end_turn = self.updateAngle()

        if self.sonar is not None:
            print(f'transmit: {self.transmit_duration}')
            self.sonar.control_transducer(
                0,  # reserved
                self.gain,
                self.angle,
                self.transmit_duration,
                self.sample_period,
                self.frequency,
                self.samples,
                1,
                0)
            self.sonar.wait_message([definitions.PING360_DEVICE_DATA, definitions.COMMON_NACK], 4.0)
            self.data = bytearray(self.sonar._data)
            return (len(self.data) != 0, end_turn)

        # emulated sonar
        from random import randint
        from time import sleep
        self.data = [0 for _ in range(self.samples)]
        scale = 5*abs((self.angle+400) % 400 - 200)
        for i in range(self.samples):
            if randint(self.samples, 2*self.samples) < 1.1*i + scale:
                self.data[i] = randint(220, 255)
        # emulate transmit duration in microseconds
        #sleep(self.transmit_duration/1000000)
        return (True, end_turn)

# handles an angular sector of the image
class Bound:
    radius = 0

    def __init__(self, x, tm, tM):
        self.x = x
        if type(tM) == int:
            self.low = Bound.clamp(tm*x)
            self.up = int(tM*sqrt(Bound.radius**2-x**2-1))
        else:
            self.low = Bound.clamp(x*tm)
            self.up = Bound.clamp(x*tM)

            if self.up**2 + x**2 > Bound.radius**2:
                self.up = int(sign(self.up) * sqrt(Bound.radius**2-x**2-1))

        if self.up < self.low:
            self.low, self.up = self.up, self.low

    @staticmethod
    def clamp(coord):
        if coord < -Bound.radius+1:
            return -Bound.radius+1
        elif coord > Bound.radius-1:
            return Bound.radius-1
        return int(coord)

class Sector:
    def __init__(self):
        self.dr = None

    def configure(self, samples, radius):
        self.dr = radius/samples
        Bound.radius = radius

    def init(self, angle, step):
        angle_min = angle-step/2
        angle_max = angle+step/2
        xmin, xmax, same_side = self.xLimits(angle_min, angle_max)
        tm, tM = tan(angle_min), tan(angle_max)
        self.bounds = []

        if same_side:
            # same side
            if abs(tm) > abs(tM):
                tm, tM = tM, tm
            for x in range(xmin, xmax+1):
                self.bounds.append(Bound(x, tm, tM))
        else:
            f = 1 if abs(angle-pi/2) < abs(angle+pi/2) else -1
            if f == -1:
                tm, tM = tM, tm
            for x in range(xmin, 0):
                self.bounds.append(Bound(x, tM, f))
            for x in range(0, xmax+1):
                self.bounds.append(Bound(x, tm, f))

        self.cur = -1

    def xLimits(self, angle_min, angle_max):
        cm = cos(angle_min)
        cM = cos(angle_max)
        if cM < cm:
            cm, cM = cM, cm
        if cm*cM > 0:
            if cM < 0:
                cM = 0
            else:
                cm = 0
        return Bound.clamp(round(Bound.radius*cm)), Bound.clamp(round(Bound.radius*cM)), cm*cM >= 0

    def nextPoint(self, x, y):
        if self.cur == -1:
            self.cur = 0
            x = self.bounds[0].x
            y = self.bounds[0].low
        elif y < self.bounds[self.cur].up:
            y += 1
        else:
            self.cur += 1
            if self.cur == len(self.bounds):
                return False, 0, 0, 0
            x = self.bounds[self.cur].x
            y = self.bounds[self.cur].low
        return True, x, y, int(round(sqrt(x*x+y*y)/self.dr))
| [[[56, 63], [510, 517]],
   [[82, 84], [2573, 2575], [8339, 8341], [8357, 8359]],
   [[86, 90], [7064, 7068], [7300, 7304], [9497, 9501]],
   [[92, 95], [8026, 8029], [8042, 8045]],
   [[97, 100], [8739, 8742], [8767, 8770]],
   [[102, 106], [7284, 7288]],
   [[126, 137], [6177, 6188], [6210, 6221]],
   [[145, 159]],
   [[6890, 6895], [7017, 7022], [7069, 7074], [7131, 7136], [7171, 7176], [7237, 7242], [7305, 7310], [7499, 7504], [7535, 7540], [7571, 7576], [7606, 7611], [7813, 7818], [8278, 8283], [8527, 8532], [8617, 8622], [8952, 8957], [8970, 8975], [8989, 8994], [9007, 9012]],
   [[7666, 7672]]]
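For orientation, here is a minimal sketch of how the two halves of this record fit together in a polling loop, using only the methods visible above; the serial port, baud rate, image radius and transducer settings are illustrative assumptions, not defaults taken from the package.

# assumes SonarInterface, Sector and Bound from the file above are in scope
sonar = SonarInterface('/dev/ttyUSB0', 115200, fallback_emulated=True)
sonar.configureAngles(aperture_deg=360, step_deg=1, ensure_divisor=True)
sonar.configureTransducer(gain=0, frequency=740, speed_of_sound=1500, max_range=2)

radius = 200                          # image half-size in pixels (assumed)
sector = Sector()
sector.configure(sonar.samples, radius)

valid, end_turn = sonar.read()        # one transmit/receive cycle, or emulated data
if valid:
    # rasterize the latest beam into pixel offsets around the image centre
    sector.init(sonar.currentAngle(), sonar.angleStep())
    more, x, y, index = sector.nextPoint(0, 0)
    while more:
        if index < len(sonar.data):
            intensity = sonar.data[index]   # echo strength at that radius
            # ... write intensity into an image at pixel (radius + x, radius + y) ...
        more, x, y, index = sector.nextPoint(x, y)

The Sector/Bound pair walks every pixel of the angular wedge covered by one beam, so a full image is built up by repeating the block above for each angle until read() reports the end of a turn.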