# ======================================================================
# repo: pansapiens/mytardis | path: tardis/apps/mx_views/views.py
# license: bsd-3-clause
# ======================================================================
from django.conf import settings
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.http import HttpResponse

from tardis.tardis_portal.auth import decorators as authz
from tardis.tardis_portal.models import Dataset
from tardis.tardis_portal.shortcuts import get_experiment_referer
from tardis.tardis_portal.shortcuts import render_response_index


@authz.dataset_access_required
def view_full_dataset(request, dataset_id):
    """Displays an MX Dataset and associated information.

    Shows a full (hundreds of images) dataset, its metadata and a list
    of associated files with the option to show metadata of each file
    and ways to download those files.  With write permission this page
    also allows uploading and metadata editing.

    Settings for this view:
    INSTALLED_APPS += ("tardis.apps.mx_views",)
    DATASET_VIEWS = [("http://synchrotron.org.au/views/dataset/full",
                      "tardis.apps.mx_views.views.view_full_dataset"),]
    """
    dataset = Dataset.objects.get(id=dataset_id)

    def get_datafiles_page():
        # pagination was removed by someone in the interface but not here.
        # need to fix.
        pgresults = 100

        paginator = Paginator(dataset.datafile_set.all(), pgresults)

        try:
            page = int(request.GET.get('page', '1'))
        except ValueError:
            page = 1

        # If page request (9999) is out of range, deliver last page of results.
        try:
            return paginator.page(page)
        except (EmptyPage, InvalidPage):
            return paginator.page(paginator.num_pages)

    display_images = dataset.get_images()
    image_count = len(display_images)
    if image_count > 4:
        # take 4 evenly spaced images from the set
        # (e.g. 10 images -> step 2 -> indices 0, 2, 4, 6)
        display_images = display_images[0::image_count / 4][:4]

    upload_method = getattr(settings, "UPLOAD_METHOD", "uploadify")

    c = {
        'dataset': dataset,
        'datafiles': get_datafiles_page(),
        'parametersets': dataset.getParameterSets()
                                .exclude(schema__hidden=True),
        'has_download_permissions':
            authz.has_dataset_download_access(request, dataset_id),
        'has_write_permissions':
            authz.has_dataset_write(request, dataset_id),
        'from_experiment':
            get_experiment_referer(request, dataset_id),
        'other_experiments':
            authz.get_accessible_experiments_for_dataset(request, dataset_id),
        'display_images': display_images,
        'upload_method': upload_method,
        'default_organization':
            getattr(settings, 'DEFAULT_ARCHIVE_ORGANIZATION', 'classic'),
        'default_format':
            getattr(settings, 'DEFAULT_ARCHIVE_FORMATS', ['tgz', 'tar'])[0]
    }
    return HttpResponse(render_response_index(
        request, 'mx_views/view_full_dataset.html', c))

# ======================================================================
# repo: twidi/pytyrant | path: pytyrant.py | license: mit
# ======================================================================
"""Pure python implementation of the binary Tokyo Tyrant 1.1.17 protocol
Tokyo Cabinet <http://tokyocabinet.sourceforge.net/> is a "super hyper ultra
database manager" written and maintained by Mikio Hirabayashi and released
under the LGPL.
Tokyo Tyrant is the de facto database server for Tokyo Cabinet written and
maintained by the same author. It supports a REST HTTP protocol, memcached,
and its own simple binary protocol. This library implements the full binary
protocol for the Tokyo Tyrant 1.1.17 in pure Python as defined here::
http://tokyocabinet.sourceforge.net/tyrantdoc/
Typical usage is with the PyTyrant class which provides a dict-like wrapper
for the raw Tyrant protocol::
>>> import pytyrant
>>> t = pytyrant.PyTyrant.open('127.0.0.1', 1978)
>>> t['__test_key__'] = 'foo'
>>> t.concat('__test_key__', 'bar')
>>> print t['__test_key__']
foobar
>>> del t['__test_key__']
"""
import math
import socket
import struct
import UserDict
__version__ = '1.1.17'
__all__ = [
'Tyrant', 'TyrantError', 'PyTyrant',
'RDBMONOULOG', 'RDBXOLCKREC', 'RDBXOLCKGLB',
]
class TyrantError(Exception):
pass
DEFAULT_PORT = 1978
MAGIC = 0xc8
RDBMONOULOG = 1 << 0
RDBXOLCKREC = 1 << 0
RDBXOLCKGLB = 1 << 1
class C(object):
"""
Tyrant Protocol constants
"""
put = 0x10
putkeep = 0x11
putcat = 0x12
putshl = 0x13
putnr = 0x18
out = 0x20
get = 0x30
mget = 0x31
vsiz = 0x38
iterinit = 0x50
iternext = 0x51
fwmkeys = 0x58
addint = 0x60
adddouble = 0x61
ext = 0x68
sync = 0x70
vanish = 0x71
copy = 0x72
restore = 0x73
setmst = 0x78
rnum = 0x80
size = 0x81
stat = 0x88
misc = 0x90
def _t0(code):
return [chr(MAGIC) + chr(code)]
def _t1(code, key):
return [
struct.pack('>BBI', MAGIC, code, len(key)),
key,
]
def _t1FN(code, func, opts, args):
outlst = [
struct.pack('>BBIII', MAGIC, code, len(func), opts, len(args)),
func,
]
for k in args:
outlst.extend([struct.pack('>I', len(k)), k])
return outlst
def _t1R(code, key, msec):
return [
struct.pack('>BBIQ', MAGIC, code, len(key), msec),
key,
]
def _t1M(code, key, count):
return [
struct.pack('>BBII', MAGIC, code, len(key), count),
key,
]
def _tN(code, klst):
outlst = [struct.pack('>BBI', MAGIC, code, len(klst))]
for k in klst:
outlst.extend([struct.pack('>I', len(k)), k])
return outlst
def _t2(code, key, value):
return [
struct.pack('>BBII', MAGIC, code, len(key), len(value)),
key,
value,
]
def _t2W(code, key, value, width):
return [
struct.pack('>BBIII', MAGIC, code, len(key), len(value), width),
key,
value,
]
def _t3F(code, func, opts, key, value):
return [
struct.pack('>BBIIII', MAGIC, code, len(func), opts, len(key), len(value)),
func,
key,
value,
]
def _tDouble(code, key, integ, fract):
return [
struct.pack('>BBIQQ', MAGIC, code, len(key), integ, fract),
key,
]
def socksend(sock, lst):
sock.sendall(''.join(lst))
def sockrecv(sock, bytes):
d = ''
while len(d) < bytes:
c = sock.recv(min(8192, bytes - len(d)))
if not c:
raise TyrantError('Connection closed')
d += c
return d
def socksuccess(sock):
fail_code = ord(sockrecv(sock, 1))
if fail_code:
raise TyrantError(fail_code)
def socklen(sock):
return struct.unpack('>I', sockrecv(sock, 4))[0]
def socklong(sock):
return struct.unpack('>Q', sockrecv(sock, 8))[0]
def sockstr(sock):
return sockrecv(sock, socklen(sock))
def sockdouble(sock):
intpart, fracpart = struct.unpack('>QQ', sockrecv(sock, 16))
return intpart + (fracpart * 1e-12)
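# Example (illustrative, not part of the original module): doubles travel on
# the wire as two unsigned 64-bit integers, an integral part and a fractional
# part scaled by 1e12.  So 2.5 is encoded as (2, 500000000000) and decoded by
# sockdouble() above as 2 + 500000000000 * 1e-12 == 2.5.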
def sockstrpair(sock):
klen = socklen(sock)
vlen = socklen(sock)
k = sockrecv(sock, klen)
v = sockrecv(sock, vlen)
return k, v
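# A minimal framing sketch (illustrative, not part of the original module):
# shows the byte layout produced by the _t2() helper above for a `put`
# command -- magic byte, command code, two big-endian lengths, then the raw
# key and value bytes.
def _demo_put_framing(key='k', value='v'):
    frame = ''.join(_t2(C.put, key, value))
    assert frame[0] == chr(MAGIC)               # 0xc8 magic byte
    assert frame[1] == chr(C.put)               # 0x10 command code
    assert struct.unpack('>II', frame[2:10]) == (len(key), len(value))
    return frame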
class PyTyrant(object, UserDict.DictMixin):
"""
Dict-like proxy for a Tyrant instance
"""
@classmethod
def open(cls, *args, **kw):
return cls(Tyrant.open(*args, **kw))
def __init__(self, t):
self.t = t
def __repr__(self):
# The __repr__ for UserDict.DictMixin isn't desirable
# for a large KV store :)
return object.__repr__(self)
def has_key(self, key):
return key in self
def __contains__(self, key):
try:
self.t.vsiz(key)
except TyrantError:
return False
else:
return True
def setdefault(self, key, value):
try:
self.t.putkeep(key, value)
except TyrantError:
return self[key]
return value
def __setitem__(self, key, value):
self.t.put(key, value)
def __getitem__(self, key):
try:
return self.t.get(key)
except TyrantError:
raise KeyError(key)
def __delitem__(self, key):
try:
self.t.out(key)
except TyrantError:
raise KeyError(key)
def __iter__(self):
return self.iterkeys()
def iterkeys(self):
self.t.iterinit()
try:
while True:
yield self.t.iternext()
except TyrantError:
pass
def keys(self):
return list(self.iterkeys())
def __len__(self):
return self.t.rnum()
def clear(self):
self.t.vanish()
def update(self, other=None, **kwargs):
# Make progressively weaker assumptions about "other"
if other is None:
pass
elif hasattr(other, 'iteritems'):
self.multi_set(other.iteritems())
elif hasattr(other, 'keys'):
self.multi_set([(k, other[k]) for k in other.keys()])
else:
self.multi_set(other)
if kwargs:
self.update(kwargs)
def multi_del(self, keys, no_update_log=False):
opts = (no_update_log and RDBMONOULOG or 0)
if not isinstance(keys, (list, tuple)):
keys = list(keys)
self.t.misc("outlist", opts, keys)
def multi_get(self, keys, no_update_log=False):
opts = (no_update_log and RDBMONOULOG or 0)
if not isinstance(keys, (list, tuple)):
keys = list(keys)
rval = self.t.misc("getlist", opts, keys)
if len(rval) <= len(keys):
# 1.1.10 protocol, may return invalid results
if len(rval) < len(keys):
raise KeyError("Missing a result, unusable response in 1.1.10")
return rval
# 1.1.11 protocol returns interleaved key, value list
d = dict((rval[i], rval[i + 1]) for i in xrange(0, len(rval), 2))
return map(d.get, keys)
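    # Example (illustrative): with the 1.1.11 protocol, misc("getlist")
    # returns an interleaved ["k1", "v1", "k2", "v2"] list; the dict above
    # re-orders the values to match the requested key order, so
    # multi_get(["k2", "k1"]) yields ["v2", "v1"].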
def multi_set(self, items, no_update_log=False):
opts = (no_update_log and RDBMONOULOG or 0)
lst = []
for k, v in items:
lst.extend((k, v))
self.t.misc("putlist", opts, lst)
def call_func(self, func, key, value, record_locking=False, global_locking=False):
opts = (
(record_locking and RDBXOLCKREC or 0) |
(global_locking and RDBXOLCKGLB or 0))
return self.t.ext(func, opts, key, value)
def get_size(self, key):
try:
return self.t.vsiz(key)
except TyrantError:
raise KeyError(key)
def get_stats(self):
return dict(l.split('\t', 1) for l in self.t.stat().splitlines() if l)
def prefix_keys(self, prefix, maxkeys=None):
if maxkeys is None:
maxkeys = len(self)
return self.t.fwmkeys(prefix, maxkeys)
def concat(self, key, value, width=None):
if width is None:
self.t.putcat(key, value)
else:
self.t.putshl(key, value, width)
def sync(self):
self.t.sync()
def close(self):
self.t.close()
class Tyrant(object):
@classmethod
def open(cls, host='127.0.0.1', port=DEFAULT_PORT, timeout=3.0):
sock = socket.socket()
sock.settimeout(timeout)
sock.connect((host, port))
sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
return cls(sock)
def __init__(self, sock):
self.sock = sock
def close(self):
self.sock.close()
def put(self, key, value):
"""Unconditionally set key to value
"""
socksend(self.sock, _t2(C.put, key, value))
socksuccess(self.sock)
def putkeep(self, key, value):
"""Set key to value if key does not already exist
"""
socksend(self.sock, _t2(C.putkeep, key, value))
socksuccess(self.sock)
def putcat(self, key, value):
"""Append value to the existing value for key, or set key to
value if it does not already exist
"""
socksend(self.sock, _t2(C.putcat, key, value))
socksuccess(self.sock)
def putshl(self, key, value, width):
"""Equivalent to::
self.putcat(key, value)
self.put(key, self.get(key)[-width:])
"""
socksend(self.sock, _t2W(C.putshl, key, value, width))
socksuccess(self.sock)
def putnr(self, key, value):
"""Set key to value without waiting for a server response
"""
socksend(self.sock, _t2(C.putnr, key, value))
def out(self, key):
"""Remove key from server
"""
socksend(self.sock, _t1(C.out, key))
socksuccess(self.sock)
def get(self, key):
"""Get the value of a key from the server
"""
socksend(self.sock, _t1(C.get, key))
socksuccess(self.sock)
return sockstr(self.sock)
def _mget(self, klst):
socksend(self.sock, _tN(C.mget, klst))
socksuccess(self.sock)
numrecs = socklen(self.sock)
for i in xrange(numrecs):
k, v = sockstrpair(self.sock)
yield k, v
def mget(self, klst):
"""Get key,value pairs from the server for the given list of keys
"""
return list(self._mget(klst))
def vsiz(self, key):
"""Get the size of a value for key
"""
socksend(self.sock, _t1(C.vsiz, key))
socksuccess(self.sock)
return socklen(self.sock)
def iterinit(self):
"""Begin iteration over all keys of the database
"""
socksend(self.sock, _t0(C.iterinit))
socksuccess(self.sock)
def iternext(self):
"""Get the next key after iterinit
"""
socksend(self.sock, _t0(C.iternext))
socksuccess(self.sock)
return sockstr(self.sock)
def _fwmkeys(self, prefix, maxkeys):
socksend(self.sock, _t1M(C.fwmkeys, prefix, maxkeys))
socksuccess(self.sock)
numkeys = socklen(self.sock)
for i in xrange(numkeys):
yield sockstr(self.sock)
def fwmkeys(self, prefix, maxkeys):
"""Get up to the first maxkeys starting with prefix
"""
return list(self._fwmkeys(prefix, maxkeys))
def addint(self, key, num):
socksend(self.sock, _t1M(C.addint, key, num))
socksuccess(self.sock)
return socklen(self.sock)
    def adddouble(self, key, num):
        fracpart, intpart = math.modf(num)
        fracpart, intpart = int(fracpart * 1e12), int(intpart)
        # Bug fix: _tDouble() packs (integ, fract) in that order; the
        # original call passed the fractional part first.
        socksend(self.sock, _tDouble(C.adddouble, key, intpart, fracpart))
        socksuccess(self.sock)
        return sockdouble(self.sock)
def ext(self, func, opts, key, value):
# tcrdbext opts are RDBXOLCKREC, RDBXOLCKGLB
"""Call func(key, value) with opts
opts is a bitflag that can be RDBXOLCKREC for record locking
and/or RDBXOLCKGLB for global locking"""
socksend(self.sock, _t3F(C.ext, func, opts, key, value))
socksuccess(self.sock)
return sockstr(self.sock)
def sync(self):
"""Synchronize the database
"""
socksend(self.sock, _t0(C.sync))
socksuccess(self.sock)
def vanish(self):
"""Remove all records
"""
socksend(self.sock, _t0(C.vanish))
socksuccess(self.sock)
def copy(self, path):
"""Hot-copy the database to path
"""
socksend(self.sock, _t1(C.copy, path))
socksuccess(self.sock)
    def restore(self, path, msec):
        """Restore the database from path at timestamp (in msec)
        """
        # Bug fix: the original sent C.copy here instead of C.restore.
        socksend(self.sock, _t1R(C.restore, path, msec))
        socksuccess(self.sock)
def setmst(self, host, port):
"""Set master to host:port
"""
socksend(self.sock, _t1M(C.setmst, host, port))
socksuccess(self.sock)
def rnum(self):
"""Get the number of records in the database
"""
socksend(self.sock, _t0(C.rnum))
socksuccess(self.sock)
return socklong(self.sock)
def size(self):
"""Get the size of the database
"""
socksend(self.sock, _t0(C.size))
socksuccess(self.sock)
return socklong(self.sock)
def stat(self):
"""Get some statistics about the database
"""
socksend(self.sock, _t0(C.stat))
socksuccess(self.sock)
return sockstr(self.sock)
def _misc(self, func, opts, args):
# tcrdbmisc opts are RDBMONOULOG
socksend(self.sock, _t1FN(C.misc, func, opts, args))
try:
socksuccess(self.sock)
finally:
numrecs = socklen(self.sock)
for i in xrange(numrecs):
yield sockstr(self.sock)
def misc(self, func, opts, args):
"""All databases support "putlist", "outlist", and "getlist".
"putlist" is to store records. It receives keys and values one after the other, and returns an empty list.
"outlist" is to remove records. It receives keys, and returns an empty list.
"getlist" is to retrieve records. It receives keys, and returns values.
Table database supports "setindex", "search", "genuid".
opts is a bitflag that can be RDBMONOULOG to prevent writing to the update log
"""
return list(self._misc(func, opts, args))
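# Illustrative usage sketch for Tyrant.misc() (assumes a Tyrant server on
# localhost; not part of the original module):
#
#   t = Tyrant.open('127.0.0.1', 1978)
#   t.misc("putlist", 0, ["k1", "v1", "k2", "v2"])    # store two records
#   t.misc("getlist", 0, ["k1", "k2"])                # interleaved k/v list
#   t.misc("outlist", RDBMONOULOG, ["k1", "k2"])      # delete, skip update log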
def main():
import doctest
doctest.testmod()
if __name__ == '__main__':
main()

# ======================================================================
# repo: HonzaKral/curator | path: test_curator/integration/test_time_based.py
# license: apache-2.0
# ======================================================================
from datetime import datetime, timedelta

import curator

from . import CuratorTestCase


class TestTimeBasedDeletion(CuratorTestCase):
    def test_curator_will_properly_delete_indices(self):
        self.create_indices(10)
        self.run_curator(delete_older=3)
        mtd = self.client.cluster.state(index=self.args['prefix'] + '*',
                                        metric='metadata')
        self.assertEquals(4, len(mtd['metadata']['indices'].keys()))

    def test_curator_will_properly_delete_hourly_indices(self):
        self.create_indices(10, 'hours')
        self.run_curator(delete_older=3, time_unit='hours')
        mtd = self.client.cluster.state(index=self.args['prefix'] + '*',
                                        metric='metadata')
        self.assertEquals(4, len(mtd['metadata']['indices'].keys()))


class TestFindExpiredIndices(CuratorTestCase):
    def test_find_indices_ignores_indices_with_different_prefix_or_time_unit(self):
        self.create_index('logstash-2012.01.01')         # wrong precision
        self.create_index('not-logstash-2012.01.01.00')  # wrong prefix
        self.create_index('logstash-2012.01.01.00')

        expired = list(curator.find_expired_indices(self.client, 'hours', 1))
        self.assertEquals(1, len(expired))
        self.assertEquals('logstash-2012.01.01.00', expired[0][0])

    def test_find_reports_correct_time_interval_from_cutoff(self):
        self.create_index('l-2014.01.01')
        self.create_index('l-2014.01.02')
        # yesterday is always safe since we reset to midnight and do <, not <=
        self.create_index('l-2014.01.03')

        expired = list(curator.find_expired_indices(
            self.client, 'days', 1,
            utc_now=datetime(2014, 1, 4, 3, 45, 50), prefix='l-'))
        self.assertEquals(
            [
                (u'l-2014.01.01', timedelta(2)),
                (u'l-2014.01.02', timedelta(1))
            ],
            expired
        )

# ======================================================================
# repo: chaubold/hytra | path: tests/core/test_conflictingsegmentations.py
# license: mit
# ======================================================================
from __future__ import print_function, absolute_import, nested_scopes, generators, division, with_statement, unicode_literals
import logging
from hytra.core.ilastik_project_options import IlastikProjectOptions
from hytra.jst.conflictingsegmentsprobabilitygenerator import ConflictingSegmentsProbabilityGenerator
from hytra.core.ilastikhypothesesgraph import IlastikHypothesesGraph
from hytra.core.fieldofview import FieldOfView
try:
import multiHypoTracking_with_cplex as mht
except ImportError:
try:
import multiHypoTracking_with_gurobi as mht
except ImportError:
mht = None
import dpct
def constructFov(shape, t0, t1, scale=[1, 1, 1]):
[xshape, yshape, zshape] = shape
[xscale, yscale, zscale] = scale
fov = FieldOfView(t0, 0, 0, 0, t1, xscale * (xshape - 1), yscale * (yshape - 1),
zscale * (zshape - 1))
return fov
# def test_twoSegmentations():
# # set up ConflictingSegmentsProbabilityGenerator
# ilpOptions = IlastikProjectOptions()
# ilpOptions.divisionClassifierPath = None
# ilpOptions.divisionClassifierFilename = None
# ilpOptions.rawImageFilename = 'tests/multiSegmentationHypothesesTestDataset/Raw.h5'
# ilpOptions.rawImagePath = 'exported_data'
# ilpOptions.rawImageAxes = 'txyzc'
# ilpOptions.labelImageFilename = 'tests/multiSegmentationHypothesesTestDataset/segmentation.h5'
# ilpOptions.objectCountClassifierFilename = 'tests/multiSegmentationHypothesesTestDataset/tracking.ilp'
# additionalLabelImageFilenames = ['tests/multiSegmentationHypothesesTestDataset/segmentationAlt.h5']
# additionalLabelImagePaths = [ilpOptions.labelImagePath]
# probabilityGenerator = ConflictingSegmentsProbabilityGenerator(
# ilpOptions,
# additionalLabelImageFilenames,
# additionalLabelImagePaths,
# useMultiprocessing=False,
# verbose=False)
# probabilityGenerator.fillTraxels(usePgmlink=False)
# assert(len(probabilityGenerator.TraxelsPerFrame[0]) == 4)
# assert(len(probabilityGenerator.TraxelsPerFrame[1]) == 3)
# assert(len(probabilityGenerator.TraxelsPerFrame[2]) == 3)
# assert(len(probabilityGenerator.TraxelsPerFrame[3]) == 4)
# filenamesPerTraxel = [t.segmentationFilename for t in probabilityGenerator.TraxelsPerFrame[3].values()]
# idsPerTraxel = [t.idInSegmentation for t in probabilityGenerator.TraxelsPerFrame[3].values()]
# assert(idsPerTraxel.count(1) == 2)
# assert(idsPerTraxel.count(2) == 2)
# assert(filenamesPerTraxel.count('tests/multiSegmentationHypothesesTestDataset/segmentation.h5') == 2)
# assert(filenamesPerTraxel.count('tests/multiSegmentationHypothesesTestDataset/segmentationAlt.h5') == 2)
# # build hypotheses graph, check that conflicting traxels are properly detected
# fieldOfView = constructFov(probabilityGenerator.shape,
# probabilityGenerator.timeRange[0],
# probabilityGenerator.timeRange[1],
# [probabilityGenerator.x_scale,
# probabilityGenerator.y_scale,
# probabilityGenerator.z_scale])
# hypotheses_graph = IlastikHypothesesGraph(
# probabilityGenerator=probabilityGenerator,
# timeRange=probabilityGenerator.timeRange,
# maxNumObjects=1,
# numNearestNeighbors=2,
# fieldOfView=fieldOfView,
# withDivisions=False,
# divisionThreshold=0.1
# )
# assert(hypotheses_graph.countNodes() == 14)
# assert(hypotheses_graph.countArcs() == 23)
# assert(hypotheses_graph._graph.node[(0, 1)]['traxel'].conflictingTraxelIds == [3])
# assert(hypotheses_graph._graph.node[(0, 3)]['traxel'].conflictingTraxelIds == [1])
# assert(hypotheses_graph._graph.node[(0, 2)]['traxel'].conflictingTraxelIds == [4])
# assert(hypotheses_graph._graph.node[(0, 4)]['traxel'].conflictingTraxelIds == [2])
# assert(hypotheses_graph._graph.node[(1, 1)]['traxel'].conflictingTraxelIds == [2, 3])
# assert(hypotheses_graph._graph.node[(1, 2)]['traxel'].conflictingTraxelIds == [1])
# assert(hypotheses_graph._graph.node[(1, 3)]['traxel'].conflictingTraxelIds == [1])
# # track, but check that the right exclusion constraints are present
# hypotheses_graph.insertEnergies()
# trackingGraph = hypotheses_graph.toTrackingGraph()
# assert(len(trackingGraph.model['exclusions']) == 8)
# for exclusionSet in trackingGraph.model['exclusions']:
# assert(len(exclusionSet) == 2)
# # use multiHypoTracking, insert exclusion constraints!
# if mht is not None:
# result = mht.track(trackingGraph.model, {"weights": [10, 10, 500, 500]})
# else:
# return
# # standard dpct cannot handle exclusion constraints yet
# result = dpct.trackFlowBased(trackingGraph.model, {"weights": [10, 10, 500, 500]})
# hypotheses_graph.insertSolution(result)
# # hypotheses_graph.computeLineage()
# numActivePerFrame = {}
# for node in hypotheses_graph.nodeIterator():
# timeframe = node[0]
# if 'value' in hypotheses_graph._graph.node[node]:
# value = hypotheses_graph._graph.node[node]['value']
# else:
# value = 0
# numActivePerFrame.setdefault(timeframe, []).append(value)
# for _, v in numActivePerFrame.items():
# assert(sum(v) == 2)
# edgeFlow = 0
# for edge in hypotheses_graph.arcIterator():
# if 'value' in hypotheses_graph._graph.edge[edge[0]][edge[1]]:
# edgeFlow += hypotheses_graph._graph.edge[edge[0]][edge[1]]['value']
# assert(edgeFlow == 6)
if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    # test_twoSegmentations() is commented out above, so calling it here
    # would raise a NameError; keep the call disabled until the test is
    # restored.
    # test_twoSegmentations()

# -*- coding: utf-8 -*-
# ======================================================================
# repo: makelove/OpenCV-Python-Tutorial | path: ch21-轮廓Contours/21-findContour.py
# license: mit
# ======================================================================
import numpy as np
import cv2

# im = cv2.imread('test.jpg')
# im = cv2.imread('poker5hearts.jpg')
# im = cv2.imread('../data/black-white-rect.png')  # contour.jpg
im = cv2.imread('../data/chessboard.jpeg')

imgray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
cv2.imshow("imgray", imgray)

# Note: cv2.findContours() expects a binary (black-and-white) image, not a
# grayscale one, so convert the image to grayscale first and then threshold
# it into a binary image.
# ret, thresh = cv2.threshold(imgray, 0, 25, 0)
# ret, thresh = cv2.threshold(imgray, 0, 100, 0)
ret, thresh = cv2.threshold(src=imgray, thresh=127, maxval=255, type=cv2.THRESH_BINARY)  # src, thresh, maxval, type
cv2.imshow("thresh", thresh)

# Contour retrieval mode: cv2.RETR_TREE retrieves the full contour hierarchy.
image, contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
print("contours size: ", len(contours))

img = cv2.drawContours(im, contours, -1, (0, 255, 0), 3)
# img = cv2.drawContours(im, contours, 3, (255, 0, 0), 3)

cv2.namedWindow("contour.jpg", 0)
cv2.imshow("contour.jpg", img)
cv2.waitKey(0)
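
# A minimal follow-up sketch (illustrative, not part of the original
# tutorial): cv2.boundingRect() gives an upright bounding box per contour
# and cv2.contourArea() its area, which is handy for filtering small blobs.
for cnt in contours:
    if cv2.contourArea(cnt) > 100:          # ignore tiny specks
        x, y, w, h = cv2.boundingRect(cnt)  # upright bounding box
        cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)
cv2.imshow("bounding boxes", img)
cv2.waitKey(0)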

# ======================================================================
# repo: llllllllll/codetransformer | path: codetransformer/tests/test_code.py
# license: gpl-2.0
# ======================================================================
from dis import dis
from io import StringIO
from itertools import product, chain
import random
import sys
import pytest
from codetransformer.code import Code, Flag, pycode
from codetransformer.instructions import LOAD_CONST, LOAD_FAST, uses_free
@pytest.fixture(scope='module')
def sample_flags(request):
random.seed(8025816322119661921) # ayy lmao
nflags = len(Flag.__members__)
return tuple(
dict(zip(Flag.__members__.keys(), case)) for case in chain(
random.sample(list(product((True, False), repeat=nflags)), 1000),
[[True] * nflags],
[[False] * nflags],
)
)
def test_lnotab_roundtrip():
# DO NOT ADD EXTRA LINES HERE
def f(): # pragma: no cover
a = 1
b = 2
c = 3
d = 4
a, b, c, d
start_line = test_lnotab_roundtrip.__code__.co_firstlineno + 3
lines = [start_line + n for n in range(5)]
code = Code.from_pycode(f.__code__)
lnotab = code.lnotab
assert lnotab.keys() == set(lines)
assert isinstance(lnotab[lines[0]], LOAD_CONST)
assert lnotab[lines[0]].arg == 1
assert isinstance(lnotab[lines[1]], LOAD_CONST)
assert lnotab[lines[1]].arg == 2
assert isinstance(lnotab[lines[2]], LOAD_CONST)
assert lnotab[lines[2]].arg == 3
assert isinstance(lnotab[lines[3]], LOAD_CONST)
assert lnotab[lines[3]].arg == 4
assert isinstance(lnotab[lines[4]], LOAD_FAST)
assert lnotab[lines[4]].arg == 'a'
assert f.__code__.co_lnotab == code.py_lnotab == code.to_pycode().co_lnotab
def test_lnotab_really_dumb_whitespace():
ns = {}
exec('def f():\n lol = True' + '\n' * 1024 + ' wut = True', ns)
f = ns['f']
code = Code.from_pycode(f.__code__)
lines = [2, 1026]
lnotab = code.lnotab
assert lnotab.keys() == set(lines)
assert isinstance(lnotab[lines[0]], LOAD_CONST)
assert lnotab[lines[0]].arg
assert isinstance(lnotab[lines[1]], LOAD_CONST)
assert lnotab[lines[1]].arg
assert f.__code__.co_lnotab == code.py_lnotab == code.to_pycode().co_lnotab
def test_flag_packing(sample_flags):
for flags in sample_flags:
assert Flag.unpack(Flag.pack(**flags)) == flags
def test_flag_unpack_too_big():
assert all(Flag.unpack(Flag.max).values())
with pytest.raises(ValueError):
Flag.unpack(Flag.max + 1)
def test_flag_max():
assert Flag.pack(
CO_OPTIMIZED=True,
CO_NEWLOCALS=True,
CO_VARARGS=True,
CO_VARKEYWORDS=True,
CO_NESTED=True,
CO_GENERATOR=True,
CO_NOFREE=True,
CO_COROUTINE=True,
CO_ITERABLE_COROUTINE=True,
CO_FUTURE_DIVISION=True,
CO_FUTURE_ABSOLUTE_IMPORT=True,
CO_FUTURE_WITH_STATEMENT=True,
CO_FUTURE_PRINT_FUNCTION=True,
CO_FUTURE_UNICODE_LITERALS=True,
CO_FUTURE_BARRY_AS_BDFL=True,
CO_FUTURE_GENERATOR_STOP=True,
) == Flag.max
def test_flag_max_immutable():
with pytest.raises(AttributeError):
Flag.CO_OPTIMIZED.max = None
def test_code_multiple_varargs():
with pytest.raises(ValueError) as e:
Code(
(), (
'*args',
'*other',
),
)
assert str(e.value) == 'cannot specify *args more than once'
def test_code_multiple_kwargs():
with pytest.raises(ValueError) as e:
Code(
(), (
'**kwargs',
'**kwargs',
),
)
assert str(e.value) == 'cannot specify **kwargs more than once'
@pytest.mark.parametrize('cls', uses_free)
def test_dangling_var(cls):
instr = cls('dangling')
with pytest.raises(ValueError) as e:
Code((instr,))
assert (
str(e.value) ==
"Argument to %r is not in cellvars or freevars." % instr
)
def test_code_flags(sample_flags):
attr_map = {
'CO_NESTED': 'is_nested',
'CO_GENERATOR': 'is_generator',
'CO_COROUTINE': 'is_coroutine',
'CO_ITERABLE_COROUTINE': 'is_iterable_coroutine',
'CO_NEWLOCALS': 'constructs_new_locals',
}
for flags in sample_flags:
if sys.version_info < (3, 6):
codestring = b'd\x00\x00S' # return None
else:
codestring = b'd\x00S' # return None
code = Code.from_pycode(pycode(
argcount=0,
kwonlyargcount=0,
nlocals=2,
stacksize=0,
flags=Flag.pack(**flags),
codestring=codestring,
constants=(None,),
names=(),
varnames=('a', 'b'),
filename='',
name='',
firstlineno=0,
lnotab=b'',
))
assert code.flags == flags
for flag, attr in attr_map.items():
if flags[flag]:
assert getattr(code, attr)
@pytest.fixture
def abc_code():
a = LOAD_CONST('a')
b = LOAD_CONST('b')
c = LOAD_CONST('c') # not in instrs
code = Code((a, b), argnames=())
return (a, b, c), code
def test_instr_index(abc_code):
(a, b, c), code = abc_code
assert code.index(a) == 0
assert code.index(b) == 1
with pytest.raises(ValueError):
code.index(c)
def test_code_contains(abc_code):
(a, b, c), code = abc_code
assert a in code
assert b in code
assert c not in code
def test_code_dis(capsys):
@Code.from_pyfunc
def code(): # pragma: no cover
a = 1
b = 2
return a, b
buf = StringIO()
dis(code.to_pycode(), file=buf)
expected = buf.getvalue()
code.dis()
out, err = capsys.readouterr()
assert not err
assert out == expected
buf = StringIO()
code.dis(file=buf)
assert buf.getvalue() == expected

# coding=utf-8
# ======================================================================
# repo: Azure/azure-sdk-for-python
# path: sdk/resources/azure-mgmt-resource/azure/mgmt/resource/locks/v2015_01_01/models/_management_lock_client_enums.py
# license: mit
# ======================================================================
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from enum import Enum, EnumMeta
from six import with_metaclass


class _CaseInsensitiveEnumMeta(EnumMeta):
    def __getitem__(self, name):
        return super().__getitem__(name.upper())

    def __getattr__(cls, name):
        """Return the enum member matching `name`

        We use __getattr__ instead of descriptors or inserting into the enum
        class' __dict__ in order to support `name` and `value` being both
        properties for enum members (which live in the class' __dict__) and
        enum members themselves.
        """
        try:
            return cls._member_map_[name.upper()]
        except KeyError:
            raise AttributeError(name)


class LockLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The lock level of the management lock.
    """

    NOT_SPECIFIED = "NotSpecified"
    CAN_NOT_DELETE = "CanNotDelete"
    READ_ONLY = "ReadOnly"
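
# Illustrative sketch (not part of the generated file): the metaclass makes
# member lookup case-insensitive, whether by subscript or attribute access.
if __name__ == "__main__":
    assert LockLevel["read_only"] is LockLevel.READ_ONLY
    assert LockLevel.can_not_delete is LockLevel.CAN_NOT_DELETE
    assert LockLevel.READ_ONLY == "ReadOnly"  # str-valued enum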

# ======================================================================
# repo: jvs/sourcer | path: tests/test_salesforce.py | license: mit
# ======================================================================
from sourcer import Grammar
# This is work in progress.
# See: https://help.salesforce.com/articleView?id=customize_functions.htm&type=5
g = Grammar(r'''
```
import ast
```
start = Expression
Expression = OperatorPrecedence(
Atom | "(" >> Expression << ")",
Postfix(ArgumentList | FieldAccess),
Prefix("-" | "+" | "!"),
RightAssoc("^"),
LeftAssoc("*" | "/"),
LeftAssoc("+" | "-" | "&"),
NonAssoc("<=" | "<" | ">=" | ">"),
NonAssoc("!=" | "<>" | "==" | "="),
LeftAssoc("&&"),
LeftAssoc("||"),
)
class ArgumentList {
arguments: "(" >> (Expression /? ",") << ")"
}
class FieldAccess {
field: "." >> Word
}
Atom = Global | Identifier | Rational | Integer | String
class Global {
name: "$" >> Word
}
class Identifier {
name: Word
}
# ASK: What is the real syntax for these things?
Word = /[_a-zA-Z][_a-zA-Z0-9]*/
Rational = /(\d+\.\d*)|(\d*\.\d+)/ |> `float`
Integer = /\d+/ |> `int`
StringLiteral = /("([^"\\]|\\.)*")/ | /('([^'\\]|\\.)*')/
# For now, just use ast module to evaluate string literals.
class String {
value: StringLiteral |> `ast.literal_eval`
}
ignore /\s+/
''', include_source=True)
aliases = {
'=': '==',
'<>': '!=',
}
constants = {
'NULL': None,
'TRUE': True,
'FALSE': False,
}
# Incomplete collection of evaluators.
evaluators = {
'*': lambda x, y: x * y if x is not None and y is not None else None,
'/': lambda x, y: x / y if x is not None and y is not None else None,
'+': lambda x, y: x + y if x is not None and y is not None else None,
'-': lambda x, y: x - y if x is not None and y is not None else None,
'==': lambda x, y: x == y,
'!=': lambda x, y: x != y,
'&&': lambda x, y: x and y,
'||': lambda x, y: x or y,
'>': lambda x, y: x > y if x is not None and y is not None else False,
'<': lambda x, y: x < y if x is not None and y is not None else False,
'>=': lambda x, y: x >= y if x is not None and y is not None else False,
'<=': lambda x, y: x <= y if x is not None and y is not None else False,
'AND': lambda *a: all(a),
'CONTAINS': lambda x, y: str(y) in str(x) if x is not None else True,
'IF': lambda x, y, z: y if x else z,
'ISBLANK': lambda x: x is None,
'LOG': lambda x: log10(x) if x is not None else None,
'MAX': lambda *a: max(*a),
'MIN': lambda *a: min(*a),
'MOD': lambda x, y: (x % y) if x is not None and y is not None else None,
'NOT': lambda x: not(x),
'OR': lambda *a: any(a),
'SQRT': lambda x: sqrt(x) if x is not None else None,
'TEXT': lambda x: str(x),
}
def evaluate(node, bindings):
# Look up identifiers.
if isinstance(node, g.Identifier):
if node.name in bindings:
return bindings[node.name]
name = node.name.upper()
return bindings.get(name, name)
# Look up fields.
if isinstance(node, g.Postfix) and isinstance(node.operator, g.FieldAccess):
obj, field = node.left, node.operator.field
if hasattr(obj, field):
return getattr(obj, field)
elif isinstance(obj, dict):
return obj.get(field)
else:
return node
# Evaluate function calls and operators.
if isinstance(node, g.Infix):
x, func, y = node.left, node.operator, node.right
args = (x, y)
elif isinstance(node, g.Postfix) and isinstance(node.operator, g.ArgumentList):
func, args = node.left, node.operator.arguments
else:
return node
# Check if we're using an alias.
func = aliases.get(func, func)
if func in evaluators:
return evaluators[func](*args)
else:
return node
def run(formula, bindings=None):
updated_bindings = dict(constants)
updated_bindings.update(bindings or {})
tree = g.parse(formula)
return g.transform(tree, lambda node: evaluate(node, updated_bindings))
def test_some_simple_formulas():
result = run('1 + 2 * 3')
assert result == 7
result = run('foo == bar && fiz == buz', bindings={
'foo': 1, 'bar': 1, 'fiz': 2, 'buz': 2,
})
assert result == True
result = run('foo == bar && fiz == buz', bindings={
'foo': 1, 'bar': 1, 'fiz': 2, 'buz': 3,
})
assert result == False
result = run('1 <= 2 && (false || true)')
assert result == True # Explicitly compare to True.
result = run('1 > 2 || (true && false)')
assert result == False # Explicitly compare to False.
result = run('foo != bar', bindings={'foo': 10, 'bar': 10})
assert not result
result = run('foo != bar', bindings={'foo': 1, 'bar': 2})
assert result
result = run('foo.bar', bindings={'foo': {'bar': 10}})
assert result == 10
result = run('foo.bar.baz', bindings={'foo': {'bar': {'baz': 100}}})
assert result == 100
result = run('MIN(20, 10, 30)')
assert result == 10
result = run('MIN(20, 10, 30) + MAX(11, 12, 13)')
assert result == 23
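
def test_field_access_with_function_calls():
    # Extra illustrative check (a sketch, not part of the original suite):
    # combines dict field access, arithmetic, and a function evaluator.
    result = run('foo.bar + 1', bindings={'foo': {'bar': 2}})
    assert result == 3

    result = run('MOD(7, 3)')
    assert result == 1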

# ======================================================================
# repo: lsaffre/timtools | path: timtools/sdoc/feeders.py
# license: bsd-2-clause
# ======================================================================
## Copyright 2003-2009 Luc Saffre
## This file is part of the TimTools project.
## TimTools is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 3 of the License, or
## (at your option) any later version.
## TimTools is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with TimTools; if not, see <http://www.gnu.org/licenses/>.

import re


def plain2xml(txt):
    # Escape the XML special characters.  (The replacement entities were
    # lost to HTML-unescaping during extraction; restored here.)
    txt = txt.replace("&", "&amp;")
    txt = txt.replace("<", "&lt;")
    return txt


memocommands = (
    (re.compile('\[url\s+(\S+)\s*(.*?)\]', re.DOTALL),
     lambda m: '<b>' + m.group(2) + '</b> (<i>' + m.group(1) + '</i>)'),
)

# urlfind =
# urlrepl = re.compile('<b>\2</b> (<u>\1</u>)')
# def urlrepl(m):


def memo2xml(txt):
    txt = plain2xml(txt)
    txt = txt.replace('[B]', '<b>')
    txt = txt.replace('[b]', '</b>')
    txt = txt.replace('[U]', '<u>')
    txt = txt.replace('[u]', '</u>')
    for find, repl in memocommands:
        txt = re.sub(find, repl, txt)
    return txt


def rst2xml(txt):
    # String exceptions are invalid in modern Python; raise a real one.
    raise NotImplementedError("rst2xml doesn't work")
    import docutils.parsers.rst
    import docutils.utils
    parser = docutils.parsers.rst.Parser()
    doc = docutils.utils.new_document("feed")
    parser.parse(txt, doc)
    raise NotImplementedError("and now?")


_feeders = {
    'xml': lambda x: x,
    'plain': plain2xml,
    'rst': rst2xml,
    'memo': memo2xml,
}


def getFeeder(name):
    return _feeders[name]
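
# Illustrative usage (a sketch, not part of the original module):
if __name__ == '__main__':
    feed = getFeeder('memo')
    # plain2xml escaping plus [B]...[b] bold markup:
    print(feed('[B]Tom & Jerry[b]'))  # -> <b>Tom &amp; Jerry</b>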

# ======================================================================
# repo: souravbadami/zulip | path: zerver/views/home.py | license: apache-2.0
# ======================================================================
from __future__ import absolute_import
from typing import Any, List, Dict, Optional, Text
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, HttpResponse, HttpRequest
from django.shortcuts import redirect
from django.utils import translation
from django.utils.cache import patch_cache_control
from six.moves import zip_longest, zip, range
from zerver.decorator import zulip_login_required, process_client
from zerver.forms import ToSForm
from zerver.lib.realm_icon import realm_icon_url
from zerver.models import Message, UserProfile, Stream, Subscription, Huddle, \
Recipient, Realm, UserMessage, DefaultStream, RealmEmoji, RealmAlias, \
RealmFilter, PreregistrationUser, UserActivity, \
UserPresence, get_recipient, name_changes_disabled, email_to_username, \
list_of_domains_for_realm
from zerver.lib.events import do_events_register
from zerver.lib.actions import update_user_presence, do_change_tos_version, \
do_update_pointer, get_cross_realm_dicts, realm_user_count
from zerver.lib.avatar import avatar_url
from zerver.lib.i18n import get_language_list, get_language_name, \
get_language_list_for_templates
from zerver.lib.push_notifications import num_push_devices_for_user
from zerver.lib.streams import access_stream_by_name
from zerver.lib.utils import statsd, get_subdomain
from zproject.backends import password_auth_enabled
from zproject.jinja2 import render_to_response
import calendar
import datetime
import logging
import os
import re
import simplejson
import time
@zulip_login_required
def accounts_accept_terms(request):
# type: (HttpRequest) -> HttpResponse
if request.method == "POST":
form = ToSForm(request.POST)
if form.is_valid():
do_change_tos_version(request.user, settings.TOS_VERSION)
return redirect(home)
else:
form = ToSForm()
email = request.user.email
special_message_template = None
if request.user.tos_version is None and settings.FIRST_TIME_TOS_TEMPLATE is not None:
special_message_template = 'zerver/' + settings.FIRST_TIME_TOS_TEMPLATE
return render_to_response(
'zerver/accounts_accept_terms.html',
{'form': form,
'email': email,
'special_message_template': special_message_template},
request=request)
def approximate_unread_count(user_profile):
# type: (UserProfile) -> int
not_in_home_view_recipients = [sub.recipient.id for sub in
Subscription.objects.filter(
user_profile=user_profile, in_home_view=False)]
# TODO: We may want to exclude muted messages from this count.
# It was attempted in the past, but the original attempt
# was broken. When we re-architect muting, we may
# want to revisit this (see git issue #1019).
return UserMessage.objects.filter(
user_profile=user_profile, message_id__gt=user_profile.pointer).exclude(
message__recipient__type=Recipient.STREAM,
message__recipient__id__in=not_in_home_view_recipients).exclude(
flags=UserMessage.flags.read).count()
def sent_time_in_epoch_seconds(user_message):
# type: (UserMessage) -> Optional[float]
# user_message is a UserMessage object.
if not user_message:
return None
# We have USE_TZ = True, so our datetime objects are timezone-aware.
# Return the epoch seconds in UTC.
return calendar.timegm(user_message.message.pub_date.utctimetuple())
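# Example (illustrative): calendar.timegm() treats the struct_time as UTC,
# so a message published at datetime(1970, 1, 1, 0, 0, 1) maps to epoch
# second 1; this is the inverse of datetime.utcfromtimestamp().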
def home(request):
# type: (HttpRequest) -> HttpResponse
if settings.DEVELOPMENT and os.path.exists('var/handlebars-templates/compile.error'):
response = render_to_response('zerver/handlebars_compilation_failed.html',
request=request)
response.status_code = 500
return response
if not settings.SUBDOMAINS_HOMEPAGE:
return home_real(request)
# If settings.SUBDOMAINS_HOMEPAGE, sends the user the landing
# page, not the login form, on the root domain
subdomain = get_subdomain(request)
if subdomain != "":
return home_real(request)
return render_to_response('zerver/hello.html',
request=request)
@zulip_login_required
def home_real(request):
# type: (HttpRequest) -> HttpResponse
# We need to modify the session object every two weeks or it will expire.
# This line makes reloading the page a sufficient action to keep the
# session alive.
request.session.modified = True
user_profile = request.user
# If a user hasn't signed the current Terms of Service, send them there
if settings.TERMS_OF_SERVICE is not None and settings.TOS_VERSION is not None and \
int(settings.TOS_VERSION.split('.')[0]) > user_profile.major_tos_version():
return accounts_accept_terms(request)
narrow = [] # type: List[List[Text]]
narrow_stream = None
narrow_topic = request.GET.get("topic")
if request.GET.get("stream"):
try:
narrow_stream_name = request.GET.get("stream")
(narrow_stream, ignored_rec, ignored_sub) = access_stream_by_name(
user_profile, narrow_stream_name)
narrow = [["stream", narrow_stream.name]]
except Exception:
logging.exception("Narrow parsing")
if narrow_stream is not None and narrow_topic is not None:
narrow.append(["topic", narrow_topic])
register_ret = do_events_register(user_profile, request.client,
apply_markdown=True, narrow=narrow)
user_has_messages = (register_ret['max_message_id'] != -1)
# Reset our don't-spam-users-with-email counter since the
# user has since logged in
if user_profile.last_reminder is not None:
user_profile.last_reminder = None
user_profile.save(update_fields=["last_reminder"])
# Brand new users get the tutorial
needs_tutorial = settings.TUTORIAL_ENABLED and \
user_profile.tutorial_status != UserProfile.TUTORIAL_FINISHED
first_in_realm = realm_user_count(user_profile.realm) == 1
# If you are the only person in the realm and you didn't invite
# anyone, we'll continue to encourage you to do so on the frontend.
prompt_for_invites = first_in_realm and \
not PreregistrationUser.objects.filter(referred_by=user_profile).count()
if user_profile.pointer == -1 and user_has_messages:
# Put the new user's pointer at the bottom
#
# This improves performance, because we limit backfilling of messages
# before the pointer. It's also likely that someone joining an
# organization is interested in recent messages more than the very
# first messages on the system.
register_ret['pointer'] = register_ret['max_message_id']
user_profile.last_pointer_updater = request.session.session_key
if user_profile.pointer == -1:
latest_read = None
else:
try:
latest_read = UserMessage.objects.get(user_profile=user_profile,
message__id=user_profile.pointer)
except UserMessage.DoesNotExist:
# Don't completely fail if your saved pointer ID is invalid
logging.warning("%s has invalid pointer %s" % (user_profile.email, user_profile.pointer))
latest_read = None
desktop_notifications_enabled = user_profile.enable_desktop_notifications
if narrow_stream is not None:
desktop_notifications_enabled = False
if user_profile.realm.notifications_stream:
notifications_stream = user_profile.realm.notifications_stream.name
else:
notifications_stream = ""
# Set default language and make it persist
default_language = register_ret['default_language']
url_lang = '/{}'.format(request.LANGUAGE_CODE)
if not request.path.startswith(url_lang):
translation.activate(default_language)
request.session[translation.LANGUAGE_SESSION_KEY] = default_language
# Pass parameters to the client-side JavaScript code.
# These end up in a global JavaScript Object named 'page_params'.
page_params = dict(
# Server settings.
share_the_love = settings.SHARE_THE_LOVE,
development_environment = settings.DEVELOPMENT,
debug_mode = settings.DEBUG,
test_suite = settings.TEST_SUITE,
poll_timeout = settings.POLL_TIMEOUT,
login_page = settings.HOME_NOT_LOGGED_IN,
server_uri = settings.SERVER_URI,
maxfilesize = settings.MAX_FILE_UPLOAD_SIZE,
max_avatar_file_size = settings.MAX_AVATAR_FILE_SIZE,
server_generation = settings.SERVER_GENERATION,
use_websockets = settings.USE_WEBSOCKETS,
save_stacktraces = settings.SAVE_FRONTEND_STACKTRACES,
# realm data.
# TODO: Move all of these data to register_ret and pull from there
realm_uri = user_profile.realm.uri,
password_auth_enabled = password_auth_enabled(user_profile.realm),
domains = list_of_domains_for_realm(user_profile.realm),
name_changes_disabled = name_changes_disabled(user_profile.realm),
mandatory_topics = user_profile.realm.mandatory_topics,
show_digest_email = user_profile.realm.show_digest_email,
realm_presence_disabled = user_profile.realm.presence_disabled,
is_zephyr_mirror_realm = user_profile.realm.is_zephyr_mirror_realm,
# user_profile data.
# TODO: Move all of these data to register_ret and pull from there
fullname = user_profile.full_name,
email = user_profile.email,
enter_sends = user_profile.enter_sends,
user_id = user_profile.id,
is_admin = user_profile.is_realm_admin,
can_create_streams = user_profile.can_create_streams(),
autoscroll_forever = user_profile.autoscroll_forever,
default_desktop_notifications = user_profile.default_desktop_notifications,
avatar_url = avatar_url(user_profile),
avatar_url_medium = avatar_url(user_profile, medium=True),
avatar_source = user_profile.avatar_source,
timezone = user_profile.timezone,
# Stream message notification settings:
stream_desktop_notifications_enabled = user_profile.enable_stream_desktop_notifications,
stream_sounds_enabled = user_profile.enable_stream_sounds,
# Private message and @-mention notification settings:
desktop_notifications_enabled = desktop_notifications_enabled,
sounds_enabled = user_profile.enable_sounds,
enable_offline_email_notifications = user_profile.enable_offline_email_notifications,
pm_content_in_desktop_notifications = user_profile.pm_content_in_desktop_notifications,
enable_offline_push_notifications = user_profile.enable_offline_push_notifications,
enable_online_push_notifications = user_profile.enable_online_push_notifications,
enable_digest_emails = user_profile.enable_digest_emails,
# Realm foreign key data from register_ret.
# TODO: Rename these to match register_ret values.
subbed_info = register_ret['subscriptions'],
unsubbed_info = register_ret['unsubscribed'],
neversubbed_info = register_ret['never_subscribed'],
people_list = register_ret['realm_users'],
bot_list = register_ret['realm_bots'],
initial_pointer = register_ret['pointer'],
initial_presences = register_ret['presences'],
event_queue_id = register_ret['queue_id'],
# Misc. extra data.
have_initial_messages = user_has_messages,
initial_servertime = time.time(), # Used for calculating relative presence age
default_language_name = get_language_name(register_ret['default_language']),
language_list_dbl_col = get_language_list_for_templates(register_ret['default_language']),
language_list = get_language_list(),
needs_tutorial = needs_tutorial,
first_in_realm = first_in_realm,
prompt_for_invites = prompt_for_invites,
notifications_stream = notifications_stream,
cross_realm_bots = list(get_cross_realm_dicts()),
unread_count = approximate_unread_count(user_profile),
furthest_read_time = sent_time_in_epoch_seconds(latest_read),
has_mobile_devices = num_push_devices_for_user(user_profile) > 0,
)
# These fields will be automatically copied from register_ret into
# page_params. It is a goal to move more of the page_params list
# into this sort of cleaner structure.
page_params_core_fields = [
'alert_words',
'attachments',
'default_language',
'emoji_alt_code',
'last_event_id',
'left_side_userlist',
'max_icon_file_size',
'max_message_id',
'muted_topics',
'realm_add_emoji_by_admins_only',
'realm_allow_message_editing',
'realm_authentication_methods',
'realm_bot_domain',
'realm_create_stream_by_admins_only',
'realm_default_language',
'realm_default_streams',
'realm_email_changes_disabled',
'realm_emoji',
'realm_filters',
'realm_icon_source',
'realm_icon_url',
'realm_invite_by_admins_only',
'realm_invite_required',
'realm_message_content_edit_limit_seconds',
'realm_name',
'realm_name_changes_disabled',
'realm_restricted_to_domain',
'realm_waiting_period_threshold',
'referrals',
'twenty_four_hour_time',
'zulip_version',
]
for field_name in page_params_core_fields:
page_params[field_name] = register_ret[field_name]
if narrow_stream is not None:
# In narrow_stream context, initial pointer is just latest message
recipient = get_recipient(Recipient.STREAM, narrow_stream.id)
try:
initial_pointer = Message.objects.filter(recipient=recipient).order_by('id').reverse()[0].id
except IndexError:
initial_pointer = -1
page_params["narrow_stream"] = narrow_stream.name
if narrow_topic is not None:
page_params["narrow_topic"] = narrow_topic
page_params["narrow"] = [dict(operator=term[0], operand=term[1]) for term in narrow]
page_params["max_message_id"] = initial_pointer
page_params["initial_pointer"] = initial_pointer
page_params["have_initial_messages"] = (initial_pointer != -1)
statsd.incr('views.home')
show_invites = True
# Some realms only allow admins to invite users
if user_profile.realm.invite_by_admins_only and not user_profile.is_realm_admin:
show_invites = False
request._log_data['extra'] = "[%s]" % (register_ret["queue_id"],)
response = render_to_response('zerver/index.html',
{'user_profile': user_profile,
'page_params': simplejson.encoder.JSONEncoderForHTML().encode(page_params),
'nofontface': is_buggy_ua(request.META.get("HTTP_USER_AGENT", "Unspecified")),
'avatar_url': avatar_url(user_profile),
'show_debug':
settings.DEBUG and ('show_debug' in request.GET),
'pipeline': settings.PIPELINE_ENABLED,
'show_invites': show_invites,
'is_admin': user_profile.is_realm_admin,
'show_webathena': user_profile.realm.webathena_enabled,
'enable_feedback': settings.ENABLE_FEEDBACK,
'embedded': narrow_stream is not None,
},
request=request)
patch_cache_control(response, no_cache=True, no_store=True, must_revalidate=True)
return response
@zulip_login_required
def desktop_home(request):
# type: (HttpRequest) -> HttpResponse
return HttpResponseRedirect(reverse('zerver.views.home.home'))
def is_buggy_ua(agent):
# type: (str) -> bool
"""Discrimiate CSS served to clients based on User Agent
Due to QTBUG-3467, @font-face is not supported in QtWebKit.
This may get fixed in the future, but for right now we can
just serve the more conservative CSS to all our desktop apps.
"""
return ("Humbug Desktop/" in agent or "Zulip Desktop/" in agent or "ZulipDesktop/" in agent) and \
"Mac" not in agent

# ======================================================================
# repo: ZeitOnline/zeit.newsletter | path: src/zeit/newsletter/browser/edit.py
# license: bsd-3-clause
# ======================================================================
from zeit.cms.i18n import MessageFactory as _
from zope.cachedescriptors.property import Lazy as cachedproperty
import os.path
import zeit.cms.browser.view
import zeit.cms.content.interfaces
import zeit.cms.interfaces
import zeit.content.image.interfaces
import zeit.content.video.interfaces
import zeit.edit.browser.form
import zeit.edit.browser.landing
import zeit.edit.browser.view
import zeit.newsletter.interfaces
import zope.component  # needed for queryMultiAdapter() below
import zope.formlib.form


class LandingZoneBase(zeit.edit.browser.landing.LandingZone):

    uniqueId = zeit.edit.browser.view.Form('uniqueId')
    block_type = 'teaser'

    def initialize_block(self):
        content = zeit.cms.interfaces.ICMSContent(self.uniqueId)
        self.block.reference = content


class GroupLandingZone(LandingZoneBase):
    """Handler to drop objects to the body's landing zone."""

    order = 0


class TeaserLandingZone(LandingZoneBase):
    """Handler to drop objects after other objects."""

    order = 'after-context'


class Teaser(zeit.cms.browser.view.Base):

    @cachedproperty
    def metadata(self):
        return zeit.cms.content.interfaces.ICommonMetadata(
            self.context.reference, None)

    @cachedproperty
    def image(self):
        # XXX copy&paste&tweak of zeit.content.cp.browser.blocks.teaser.Display
        content = self.context.reference
        if content is None:
            return
        if zeit.content.video.interfaces.IVideoContent.providedBy(content):
            return content.thumbnail
        images = zeit.content.image.interfaces.IImages(content, None)
        if images is None:
            preview = zope.component.queryMultiAdapter(
                (content, self.request), name='preview')
            if preview:
                return self.url(preview)
            return
        if not images.image:
            return
        group = images.image
        for name in group:
            basename, ext = os.path.splitext(name)
            if basename.endswith('148x84'):
                image = group[name]
                return self.url(image, '@@raw')


class Advertisement(zeit.cms.browser.view.Base):

    @cachedproperty
    def image(self):
        if not self.context.image:
            return
        return self.url(self.context.image, '@@raw')


class GroupTitle(zeit.edit.browser.form.InlineForm):

    legend = None
    prefix = 'group'
    undo_description = _('edit group title')
    form_fields = zope.formlib.form.FormFields(
        zeit.newsletter.interfaces.IGroup).select('title')


class Empty(object):

    def render(self):
        return u''

# coding: utf-8
# ======================================================================
# repo: vadyur/script.media.aggregator | path: anidub.py
# ======================================================================
import log
from log import debug
from settings import Settings
from base import *
import feedparser, urllib2, re
from bs4 import BeautifulSoup
from nfowriter import *
from strmwriter import *
import requests, filesystem
###################################################################################################
class DescriptionParser(DescriptionParserBase):
#==============================================================================================
def get_content(self, url):
page = urllib2.urlopen(url)
return page
#==============================================================================================
def __init__(self, url):
Informer.__init__(self)
self._dict = dict()
self.content = self.get_content(url)
#html_doc = '<?xml version="1.0" encoding="UTF-8" ?>\n<html>' + content.encode('utf-8') + '\n</html>'
self.soup = BeautifulSoup(self.content, 'html.parser')
self.OK = self.parse()
#==============================================================================================
def get_tag(self, x):
return {
u'Год: ': u'year',
u'Жанр: ': u'genre',
u'Описание: ': u'plot',
u'Режиссер: ': u'director',
u'Продолжительность: ': u'runtime',
u'Страна: ': u'country',
}.get(x, u'')
#==============================================================================================
def clean(self, title):
try:
title = title.split(u' ТВ-')[0]
title = title.split(u' TV-')[0]
title = title.split(u' [')[0]
except:
pass
return title.strip()
#==============================================================================================
def get_title(self, full_title):
try:
found = re.search('^(.+?) /', full_title).group(1)
return self.clean(found)
except AttributeError:
return full_title
#==============================================================================================
def get_original_title(self, full_title):
try:
found = re.search('^.+? / (.+)', full_title).group(1)
return self.clean(found)
except AttributeError:
return full_title
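    # Example (illustrative): for a combined feed title like
    #   u'Наруто / Naruto ТВ-2 [720 из 720]'
    # get_title() returns u'Наруто' and get_original_title() returns
    # u'Naruto' (the clean() helper strips the TV-season and bracket tags).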
#==============================================================================================
def parse_season_from_title(self, title):
try:
found = re.search(r"(\d) \[\d+\D+\d+\]", title)
if found:
try:
self._dict['season'] = int(found.group(1))
return
except:
pass
parts = title.split(u'ТВ-')
if len(parts) == 1:
parts = title.split(u'TV-')
if len(parts) > 1:
found = re.search('([0-9]+)', parts[1]).group(1)
self._dict['season'] = int(found)
except:
pass
#==============================================================================================
def get_episodes_num(self, full_title):
try:
found = re.search(' \[([0-9]+) ', full_title).group(1)
return int(found)
except AttributeError:
return 1
def date_added_duration(self):
ul = self.soup.find('ul', class_='story_inf')
if ul:
for li in ul.find_all('li'):
txt = li.get_text()
parts = txt.split(':')
if len(parts) > 1 and parts[0] == u'Дата':
date, t = parts[1].split(',') # d u' 30-09-2012' unicode
from datetime import datetime, timedelta
day = timedelta(1)
yesterday = datetime.today() - day
#date = ' 30-09-2012'
if u'Сегодня' in date:
d = datetime.today()
elif u'Вчера' in date:
d = yesterday
else:
try:
d = datetime.strptime(date.strip(), '%d-%m-%Y')
except TypeError:
d = datetime.today()
dt = datetime.today() - d
return dt
#==============================================================================================
def parse(self):
tag = u''
self._dict['gold'] = False
self._dict['season'] = 1
for title in self.soup.select('#news-title'):
full_title = title.get_text()
debug(full_title)
self._dict['title'] = self.get_title(full_title)
self._dict['originaltitle'] = self.get_original_title(full_title)
self.parse_season_from_title(full_title)
self._dict['episodes'] = self.get_episodes_num(full_title)
for b in self.soup.select('div.xfinfodata b'):
try:
text = b.get_text()
tag = self.get_tag(text)
if tag != '':
span = b.find_next_sibling('span')
self._dict[tag] = span.get_text().strip()
except:
pass
for div in self.soup.select('div.story_c'):
try:
text = div.get_text()
text = text.split(u'Описание:')[1]
text = text.split(u'Эпизоды')[0]
text = text.split(u'Скриншоты')[0]
text = text.strip()
self._dict['plot'] = text
#debug('---')
#debug(text)
#debug('---')
except:
pass
for b in self.soup.select('div.story_h .rcol sup b'):
try:
text = b.get_text()
text = text.split(' ')[0]
self._dict['rating'] = float(text) * 2
debug('rating: ' + str(self._dict['rating']))
except:
pass
for img in self.soup.select('span.poster img'):
try:
self._dict['thumbnail'] = img['src'].strip()
debug(self._dict['thumbnail'])
except:
pass
fanart = []
for a in self.soup.select('ul.clr li a'):
try:
debug(a['href'])
fanart.append(a['href'].strip())
except:
pass
if len(fanart) != 0:
self._dict['fanart'] = fanart
# else:
# dt = self.date_added_duration()
# if dt and dt.days <= 14:
# return False
for img in self.soup.select('div.video_info a img'):
try:
self._dict['studio'] = img['alt'].strip()
debug(self._dict['studio'])
except:
pass
tags = []
for a in self.soup.select('a[href*="https://tr.anidub.com/tags/"]'):
tags.append(a.get_text().strip())
if len(tags) > 0:
self._dict['tag'] = tags
return True
###################################################################################################
def write_tvshow_nfo(parser, tvshow_api, tvshow_path):
try:
if write_tvshow_nfo.favorites:
parser.Dict().get('tag', []).append('favorites')
except:
pass
NFOWriter(parser, tvshow_api=tvshow_api).write_tvshow_nfo(tvshow_path)
return
###################################################################################################
def write_tvshow(content, path, settings):
with filesystem.save_make_chdir_context(path, 'Anidub.write_tvshow'):
d = feedparser.parse(content)
cnt = 0
settings.progress_dialog.update(0, 'anidub', path)
for item in d.entries:
write_tvshow_item(item, path, settings)
cnt += 1
settings.progress_dialog.update(cnt * 100 / len(d.entries), 'anidub', path)
def write_tvshow_item(item, path, settings, path_out=None):
    # A mutable default ([]) would be shared across calls and accumulate
    # paths between invocations; create a fresh list per call instead.
    if path_out is None:
        path_out = []
    debug('-------------------------------------------------------------------------')
    debug(item.link)
parser = DescriptionParser(item.link)
if parser.parsed():
title = parser.get_value('title')
debug(title)
originaltitle = parser.get_value('originaltitle')
debug(originaltitle)
season = parser.get_value('season')
from downloader import TorrentDownloader
TorrentDownloader(item.link, settings.torrents_path(), settings).download()
debug('Episodes: ' + str(parser.get_value('episodes')))
tvshow_path = make_fullpath(title, '')
tvshow_path = filesystem.join(path, tvshow_path)
debug(tvshow_path)
path_out.append(tvshow_path)
settings.update_paths.add(tvshow_path)
with filesystem.save_make_chdir_context(tvshow_path, 'Anidub.write_tvshow_item'):
tvshow_api = TVShowAPI.get_by(originaltitle, title)
write_tvshow_nfo(parser, tvshow_api, tvshow_path)
season_path = filesystem.join(tvshow_path, u'Season ' + unicode(season))
debug(season_path)
with filesystem.save_make_chdir_context(season_path, 'Anidub.write_tvshow_item_2'):
episodes = tvshow_api.episodes(season)
if len(episodes) < parser.get_value('episodes'):
for i in range(len(episodes) + 1, parser.get_value('episodes') + 1):
episodes.append({
'title': title,
'showtitle': title,
'short': 's%02de%02d' % (season, i),
'episode': i,
'season': season
})
for episode in episodes:
title = episode['title']
shortName = episode['short']
episodeNumber = episode['episode']
if episodeNumber <= parser.get_value('episodes'):
filename = str(episodeNumber) + '. ' + 'episode_' + shortName
debug(filename)
ep = tvshow_api.Episode(season, episodeNumber)
if ep:
episode = ep
STRMWriter(item.link).write(filename, season_path, episodeNumber=episodeNumber, settings=settings)
NFOWriter(parser, tvshow_api=tvshow_api).write_episode(episode, filename, season_path)
else:
skipped(item)
del parser
def get_session(settings):
s = requests.Session()
data = {"login_name": settings.anidub_login, "login_password": settings.anidub_password, "login": "submit"}
headers = {
'Host': 'tr.anidub.com',
'Origin': 'https://tr.anidub.com',
'Referer': 'https://tr.anidub.com/',
'Upgrade-Insecure-Requests': '1',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132'
}
login = s.post("https://tr.anidub.com/", data=data, headers=headers)
debug('Login status: %d' % login.status_code)
if 'login_name' in login.content:
debug('Login failed')
return s
def download_torrent(url, path, settings):
from base import save_hashes
save_hashes(path)
url = urllib2.unquote(url)
debug('download_torrent:' + url)
s = get_session(settings)
page = s.get(url)
#debug(page.text.encode('utf-8'))
soup = BeautifulSoup(page.text, 'html.parser')
try:
a = soup.select_one('#tv720 div.torrent_h a')
except TypeError:
a = None
try:
if a is None:
a = soup.select_one('div.torrent_h > a')
except TypeError:
a = None
if a is not None:
href = 'https://tr.anidub.com' + a['href']
debug(s.headers)
r = s.get(href, headers={'Referer': url})
debug(r.headers)
if 'Content-Type' in r.headers:
if not 'torrent' in r.headers['Content-Type']:
return False
try:
with filesystem.fopen(path, 'wb') as torr:
for chunk in r.iter_content(100000):
torr.write(chunk)
save_hashes(path)
return True
except:
pass
return False
def write_pages(url, path, settings, params=None, filter_fn=None, dialog_title=None, path_out=None):
    # Avoid shared mutable defaults for params/path_out (same pitfall as above).
    if params is None:
        params = {}
    if path_out is None:
        path_out = []
    s = get_session(settings)
if params:
page = s.post(url, data=params)
else:
page = s.get(url)
soup = BeautifulSoup(page.content, 'html.parser')
page_no = 1
cnt = 0
class Item:
def __init__(self, link, title):
self.link = link
self.title = title
with filesystem.save_make_chdir_context(path, 'Anidub.write_pages'):
while True:
if params:
selector = soup.select('div.search_post > div.text > h2 > a')
else:
selector = soup.select('article.story > div.story_h > div.lcol > h2 > a')
if not selector:
break
settings.progress_dialog.update(0, dialog_title, path)
for a in selector:
log.debug(a['href'])
link = a['href']
title = a.get_text()
if filter_fn and filter_fn(title):
continue
write_tvshow_item(Item(link, title), path, settings, path_out)
cnt += 1
settings.progress_dialog.update(cnt * 100 / len(selector), dialog_title, path)
if not 'favorites' in url:
break
page_no += 1
page = s.get(url + 'page/%d/' % page_no)
if page.status_code == requests.codes.ok:
soup = BeautifulSoup(page.text, 'html.parser')
else:
break
return cnt
def write_favorites(path, settings):
write_pages('https://tr.anidub.com/favorites/', path, settings, dialog_title=u'Избранное AniDUB')
def search_generate(what, settings, path_out):
def filter(title):
if what not in title:
return True
return False
write_tvshow_nfo.favorites = False
return write_pages('https://tr.anidub.com/index.php?do=search',
settings.anime_tvshow_path(), settings,
{'do': 'search',
'subaction': 'search',
'story': what.encode('utf-8')}, filter,
dialog_title=u'Поиск AniDUB',
path_out=path_out)
###################################################################################################
def run(settings):
if settings.anime_save:
if settings.anidub_rss:
write_tvshow_nfo.favorites = False
write_tvshow(settings.anidub_url, settings.anime_tvshow_path(), settings)
if settings.anidub_favorite:
write_tvshow_nfo.favorites = True
write_favorites(settings.anime_tvshow_path(), settings)
if __name__ == '__main__':
settings = Settings('../media_library')
run(settings)
| gpl-3.0 | 2,000,693,373,387,839,200 | 26.610619 | 118 | 0.567228 | false |
robdobsn/AmazonEchoShopping | WaitroseService/WaitroseScraper.py | 1 | 20691 | # Waitrose web scraper
__author__ = 'robdobsn'
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import selenium.webdriver.support.ui as webdriverui
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, WebDriverException, TimeoutException
from selenium.webdriver.support import expected_conditions as EC
from bs4 import BeautifulSoup
import logging
import json
import re
class WaitroseScraper():
def __init__(self):
logging.info("Waitrose scraper starting")
self.isInitalized = False
self.isLoggedIn = False
self.webDriverType = "PhantomJS"
self.execUsingJS = False
def clickButtonByClassName(self, className):
if self.execUsingJS:
self.webDriver.execute_script("document.getElementsByClassName('" + className + "')[0].click()")
else:
btn = self.webDriver.find_element_by_class_name(className)
btn.click()
def clickButtonByXPath(self, xpath):
if self.execUsingJS:
self.webDriver.execute_script("return document.evaluate('" + xpath + "', document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue.click()")
else:
btn = self.webDriver.find_element_by_xpath(xpath)
btn.click()
def clickButtonByCSSSelector(self, cssSelector):
btn = self.webDriver.find_element_by_css_selector(cssSelector)
btn.click()
def checkButtonEnabledByCSSSelector(self, cssSelector):
btn = self.webDriver.find_element_by_css_selector(cssSelector)
return btn.is_enabled() and btn.is_displayed()
def sendKeysToFieldById(self, elemId, strToSend, pressEnterAfter, clearFirst):
# if self.execUsingJS:
# self.webDriver.execute_script("document.getElementsByClassName('" + elemId + "').value = '" + strToSend)
# else:
print("Sending keys to elemId " + elemId + " keys = " + strToSend)
field = self.webDriver.find_element_by_id(elemId)
print(field)
if (clearFirst):
field.send_keys(Keys.CONTROL + "a")
field.send_keys(Keys.DELETE)
field.send_keys(strToSend + (Keys.RETURN if pressEnterAfter else ""))
def debugDumpPageSource(self, filenameExtra=""):
with open("debugPageSource" + filenameExtra + ".html", "w") as debugDumpFile:
debugDumpFile.write(self.webDriver.page_source)
self.webDriver.save_screenshot('debugPageImage.png')
# Start the web driver (runs the browser)
def startWebDriver(self):
# Clear current session file info
with open('browserSession.json', 'w') as outfile:
json.dump({}, outfile)
# Create WebDriver
if self.webDriverType == "Chrome":
try:
self.webDriver = webdriver.Chrome()
except WebDriverException:
logging.error("startWebDriver() Chrome Failed to start")
return False
elif self.webDriverType == "Firefox":
try:
self.webDriver = webdriver.Firefox()
except WebDriverException:
logging.error("startWebDriver() Firefox Failed to start")
return False
elif self.webDriverType == "PhantomJS":
try:
self.webDriver = webdriver.PhantomJS() # or add to your PATH
except:
try:
self.webDriver = webdriver.PhantomJS(
                    executable_path=r'C:\ProgramData\PhantomJS\bin')
except:
try:
self.webDriver = webdriver.PhantomJS(
executable_path='/usr/local/lib/node_modules/phantomjs/lib/phantom/bin/phantomjs')
except:
try:
self.webDriver = webdriver.PhantomJS(
executable_path=r'C:\Users\rob_2\AppData\Roaming\npm\node_modules\phantomjs\lib\phantom\bin\phantomjs.exe')
except:
logging.error("Failed to load the PhantomJS webdriver")
return False
# Set the window size (seems to be needed in phantomJS particularly
# This is probably because the website responds in mobile mode?
self.webDriver.set_window_size(1280,1024)
# Save session info
url = self.webDriver.command_executor._url
session_id = self.webDriver.session_id
with open('browserSession.json', 'w') as outfile:
json.dump({"url": url, "session_id": session_id}, outfile)
return True
def websiteLogin(self, username, password, attemptIdx):
try:
self.webDriver.save_screenshot('debug1_'+str(attemptIdx)+'.png')
logging.info("Waiting for signInRegister button")
wait = WebDriverWait(self.webDriver, 30)
wait.until(EC.visibility_of_element_located((By.CLASS_NAME, "js-sign-in-register")))
logging.info("waitroseLogin() pressing signInRegister button")
self.clickButtonByClassName('js-sign-in-register')
self.webDriver.save_screenshot('debug2_'+str(attemptIdx)+'.png')
try:
print("Starting to wait for logon-email")
wait = WebDriverWait(self.webDriver, 30)
wait.until(EC.visibility_of_element_located((By.ID, "logon-email")))
print("Finished waiting for logon-email")
self.webDriver.save_screenshot('debug3_' + str(attemptIdx) + '.png')
try:
logging.info("waitroseLogin() entering username")
self.debugDumpPageSource("contbutton")
self.sendKeysToFieldById('logon-email', username, False, True)
self.webDriver.save_screenshot('debug4_' + str(attemptIdx) + '.png')
# self.clickButtonByXPath("//input[@type='button' and @value='Continue']")
if (self.checkButtonEnabledByCSSSelector("input[value='Continue'][type='button']")):
self.clickButtonByCSSSelector("input[value='Continue'][type='button']")
try:
logging.info("waitroseLogin() waiting for logon-password visible")
wait = WebDriverWait(self.webDriver, 60)
wait.until(EC.visibility_of_element_located((By.ID, "logon-password")))
self.webDriver.save_screenshot('debug5_' + str(attemptIdx) + '.png')
try:
logging.info("waitroseLogin() entering password")
self.sendKeysToFieldById('logon-password', password, False, True)
#self.clickButtonById('logon-button-sign-in')
self.clickButtonByCSSSelector("input[value='Sign in'][type='button']")
self.webDriver.save_screenshot('debug6_' + str(attemptIdx) + '.png')
logging.info("waitroseLogin() waiting for trolley-total to be visible")
wait = WebDriverWait(self.webDriver, 60)
wait.until(EC.visibility_of_element_located((By.CLASS_NAME, "trolley-total")))
self.webDriver.save_screenshot('debug7_' + str(attemptIdx) + '.png')
elem2 = self.webDriver.find_element_by_class_name('trolley-total')
if elem2:
logging.info("waitroseLogin() basket found")
else:
logging.info("waitroseLogin() basket not found")
return True
except WebDriverException as err:
logging.error("waitroseLogin() Cannot find logon-password after wait " + err.msg)
self.debugDumpPageSource()
except WebDriverException as err:
logging.error("waitroseLogin() Cannot find logon-password field" + err.msg)
self.debugDumpPageSource()
except WebDriverException as err:
logging.error("waitroseLogin() Error entering logon-email" + err.msg)
self.debugDumpPageSource()
except WebDriverException as err:
logging.error("waitroseLogin() Cannot find logon-email field" + err.msg)
self.debugDumpPageSource()
except WebDriverException as err:
logging.error("waitroseLogin() Cannot find sign-in-register button" + err.msg)
self.debugDumpPageSource()
return False
def getBasketSummary(self):
basketSummary = {}
# Ensure we wait until the trolley-total is visible
try:
wait = WebDriverWait(self.webDriver, 20)
wait.until(EC.visibility_of_element_located((By.CLASS_NAME, "trolley-total")))
except TimeoutException:
logging.error("Get basket summary timeout exception")
self.debugDumpPageSource()
return None
except WebDriverException:
logging.error("Get basket summary webdriver element exception")
self.debugDumpPageSource()
return None
# Get basket total price
try:
totalElem = self.webDriver.find_element_by_class_name('trolley-total')
if totalElem:
                reTotalElem = re.search(r"([0-9]{1,4}\.[0-9]{2})", totalElem.text)
if reTotalElem:
basketSummary["totalPrice"] = reTotalElem.group(1)
logging.info("waitrose: Basket: total=" + str(basketSummary["totalPrice"]))
# Get number of basket items
summaryElem = self.webDriver.find_element_by_class_name('trolley-summary')
if summaryElem:
reSummaryElem = re.search("([0-9]{1,4}) items", summaryElem.text)
if reSummaryElem:
basketSummary["numItems"] = reSummaryElem.group(1)
logging.info("waitrose: Basket: num items=" + str(basketSummary["numItems"]))
except WebDriverException:
logging.error("waitrose: Get basket summary webdriver element exception")
self.debugDumpPageSource()
return None
# Return info found
return basketSummary
def getElemAttrIfPresent(self, soup, elemName, className, subElem, attrName, regexReplace, destDict=None, dictName=None):
rslt = ""
try:
el = soup.find(elemName, class_=className)
if subElem is not "":
el = el.find(subElem)
if attrName == "text":
rslt = el.get_text()
else:
rslt = el[attrName]
if regexReplace is not "":
rslt = re.sub(regexReplace, "", rslt)
if destDict is not None:
destDict[dictName] = rslt
except WebDriverException:
logging.error("waitrose: Error extracting element " + elemName + " " + className)
self.debugDumpPageSource()
except:
logging.error("waitrose: Error (not webdriver) extracting element " + elemName + " " + className)
self.debugDumpPageSource()
return rslt
def getShoppingItems(self, isTrolleyPage):
# Make sure all items on the page are loaded - lazy loader
try:
self.debugDumpPageSource("m-product")
webdriverui.WebDriverWait(self.webDriver, 10)\
.until(EC.visibility_of_element_located((By.CLASS_NAME, "m-product")))
except WebDriverException:
logging.error("Wait for m-product webdriver element exception")
return []
productsFound = self.webDriver.find_elements_by_class_name("m-product")
print("waitrose: Lazy loading products - currently " + str(len(productsFound)) + " found")
numRepeats = 0
if len(productsFound) > 10:
while True:
prevFound = len(productsFound)
self.webDriver.execute_script("window.scrollBy(0,window.innerHeight)")
productsFound = self.webDriver.find_elements_by_class_name("m-product")
print("Loading products - currently " + str(len(productsFound)) + " found")
if len(productsFound) <= prevFound:
numRepeats += 1
if numRepeats > 20:
break
else:
numRepeats = 0
print("Done lazy loading products " + str(len(productsFound)) + " found")
# Go through items in the list on the current page
shoppingItems = []
for product in productsFound:
# Get HTML for this product
basketIt = {}
el = product.get_attribute("innerHTML")
productSoup = BeautifulSoup(el, "html.parser")
# Extract some common details
self.getElemAttrIfPresent(productSoup, "a", "m-product-open-details", "", "href", "", basketIt, "detailsHref")
self.getElemAttrIfPresent(productSoup, "a", "m-product-open-details", "img", "src", "", basketIt, "imageSrc")
self.getElemAttrIfPresent(productSoup, "div", "m-product-volume", "", "text", r"\W", basketIt, "productVolume")
# Check if we are doing the trolley page - which has extra info like number of items ordered
if isTrolleyPage:
self.getElemAttrIfPresent(productSoup, "div", "m-product-title", "a", "text", "", basketIt, "productTitle")
if not "productTitle" in basketIt or basketIt["productTitle"] == "":
self.getElemAttrIfPresent(productSoup, "a", "m-product-open-details", "img", "title", "", basketIt,
"productTitle")
self.getElemAttrIfPresent(productSoup, "div", "quantity-append", "input", "value", "", basketIt,
"trolleyQuantity")
self.getElemAttrIfPresent(productSoup, "p", "m-product-details", "span", "text", "", basketIt,
"trolleyPrice")
self.getElemAttrIfPresent(productSoup, "div", "m-product-details-container", "div", "data-price", "",
basketIt,
"price")
self.getElemAttrIfPresent(productSoup, "div", "m-product-details-container", "div", "data-priceperkg",
"", basketIt, "pricePerKg")
self.getElemAttrIfPresent(productSoup, "div", "m-product-details-container", "div", "data-orderitemid",
"", basketIt, "orderItemId")
self.getElemAttrIfPresent(productSoup, "div", "m-product-details-container", "div", "data-producttype",
"", basketIt, "productType")
self.getElemAttrIfPresent(productSoup, "div", "m-product-details-container", "div", "data-productid",
"", basketIt, "productId")
self.getElemAttrIfPresent(productSoup, "div", "m-product-details-container", "div", "data-uom", "", basketIt,
"UOM")
self.getElemAttrIfPresent(productSoup, "div", "m-product-details-container", "div", "data-weighttype",
"", basketIt, "weightType")
self.getElemAttrIfPresent(productSoup, "div", "m-product-details-container", "div", "data-substitute",
"", basketIt, "substitute")
else:
self.getElemAttrIfPresent(productSoup, "div", "m-product-price-container", "span", "text", "\W", basketIt,
"price")
self.getElemAttrIfPresent(productSoup, "a", "m-product-open-details", "", "text", "", basketIt,
"productTitle")
if not "productTitle" in basketIt or basketIt["productTitle"] == "":
self.getElemAttrIfPresent(productSoup, "a", "m-product-open-details", "img", "title", "", basketIt,
"productTitle")
# Check if the product at least has a title and only add to list if it does
if not "productTitle" in basketIt or basketIt["productTitle"] == "":
logging.error("Extract Shopping List: Failed to extract product name")
else:
shoppingItems.append(basketIt)
return shoppingItems
def getTrolleyContents(self):
# Ensure we wait until the trolley-total is visible
try:
wait = WebDriverWait(self.webDriver, 20)
wait.until(EC.visibility_of_element_located((By.CLASS_NAME, "trolley-total")))
except WebDriverException:
logging.error("Wait for Trolley-Total webdriver element exception")
self.debugDumpPageSource()
return None
# Navigate to the basket contents
try:
self.clickButtonByXPath('//div[@class="mini-trolley"]//a')
wait = WebDriverWait(self.webDriver, 30)
wait.until(EC.visibility_of_element_located((By.ID, "my-trolley")))
except NoSuchElementException:
logging.error("Press view trolley button no such element")
self.debugDumpPageSource()
return None
except WebDriverException:
logging.error("Press view trolley button webdriver element exception")
self.debugDumpPageSource()
return None
# Get the shopping items on the current page
return self.getShoppingItems(True)
def getFavourites(self):
# Ensure we wait until the favourites is visible
try:
wait = WebDriverWait(self.webDriver, 20)
wait.until(EC.visibility_of_element_located((By.CLASS_NAME, "js-navbar-favourites")))
except WebDriverException:
logging.error("Wait for favourites button webdriver element exception")
self.debugDumpPageSource()
return None
# Navigate to the favourites
try:
FAVOURITES_BUTTON_XPATH = '//a[@class="js-navbar-favourites"]'
elemBasketBtn = self.webDriver.find_element_by_xpath(FAVOURITES_BUTTON_XPATH)
print(elemBasketBtn)
elemBasketBtn.click()
wait = WebDriverWait(self.webDriver, 60)
wait.until(EC.visibility_of_element_located((By.CLASS_NAME, "products-grid")))
except NoSuchElementException:
logging.error("Press view favourites button no such element")
self.debugDumpPageSource()
return None
except WebDriverException:
logging.error("Press view favourites button webdriver element exception")
self.debugDumpPageSource()
return None
# Get the shopping items on the current page
return self.getShoppingItems(False)
# Handle site login
def siteLogin(self, siteUrl, username, password, titleMustContainStr):
# Start webDriver
if not self.startWebDriver():
logging.error("Unable to start webdriver")
return False
self.isInitalized = True
# Go to URL
logging.info("Webdriver going to " + siteUrl)
self.webDriver.get(siteUrl)
logging.info("Webdriver site title = " + self.webDriver.title)
if not titleMustContainStr in self.webDriver.title:
logging.error("Site " + siteUrl + " title doesn't contain " + titleMustContainStr)
self.debugDumpPageSource()
return False
# Handle login
self.isLoggedIn = self.websiteLogin(username, password, 1)
# Succeeded so far
return self.isLoggedIn
# Ensure that we are logged in
def ensureLoggedIn(self, username, password):
# Ensure we are initialised
if not self.isInitalized:
self.siteLogin("http://www.waitrose.com", username, password, "Waitrose")
# Try to login again if not currently logged in
if self.isInitalized:
if not self.isLoggedIn:
self.isLoggedIn = self.websiteLogin(username, password, 2)
return self.isLoggedIn
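
# --- Usage sketch (illustrative; not part of the original scraper) ---
# The WAITROSE_USER / WAITROSE_PASS environment variables are assumptions
# made for this example; substitute your own credential handling.
if __name__ == '__main__':
    import os
    logging.basicConfig(level=logging.INFO)
    scraper = WaitroseScraper()
    if scraper.ensureLoggedIn(os.environ.get('WAITROSE_USER', ''),
                              os.environ.get('WAITROSE_PASS', '')):
        print(scraper.getBasketSummary())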
| isc | -783,531,863,977,082,500 | 46.895833 | 169 | 0.581654 | false |
kidscancode/gamedev | pygame template.py | 1 | 1508 | # Pygame Template
# Use this to start a new Pygame project
# KidsCanCode 2015
import pygame
import random
# define some colors (R, G, B)
WHITE = (255, 255, 255)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
BLACK = (0, 0, 0)
FUCHSIA = (255, 0, 255)
GRAY = (128, 128, 128)
LIME = (0, 128, 0)
MAROON = (128, 0, 0)
NAVYBLUE = (0, 0, 128)
OLIVE = (128, 128, 0)
PURPLE = (128, 0, 128)
RED = (255, 0, 0)
SILVER = (192, 192, 192)
TEAL = (0, 128, 128)
YELLOW = (255, 255, 0)
ORANGE = (255, 128, 0)
CYAN = (0, 255, 255)
# basic constants to set up your game
WIDTH = 360
HEIGHT = 480
FPS = 30
BGCOLOR = BLACK
# initialize pygame
pygame.init()
# initialize sound - uncomment if you're using sound
# pygame.mixer.init()
# create the game window and set the title
screen = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption("My Game")
# start the clock
clock = pygame.time.Clock()
# set the 'running' variable to False to end the game
running = True
# start the game loop
while running:
# keep the loop running at the right speed
clock.tick(FPS)
# Game loop part 1: Events #####
for event in pygame.event.get():
# this one checks for the window being closed
        if event.type == pygame.QUIT:
            running = False
# add any other events here (keys, mouse, etc.)
# Game loop part 2: Updates #####
# Game loop part 3: Draw #####
screen.fill(BGCOLOR)
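    # example draw call (illustrative): a filled circle in the window centre
    pygame.draw.circle(screen, RED, (WIDTH // 2, HEIGHT // 2), 20)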
# after drawing, flip the display
pygame.display.flip()
# close the window
pygame.quit()
| mit | 3,454,655,415,024,161,000 | 22.936508 | 55 | 0.63992 | false |
JacobFischer/Joueur.py | games/anarchy/forecast.py | 1 | 2074 | # Forecast: The weather effect that will be applied at the end of a turn, which causes fires to spread.
# DO NOT MODIFY THIS FILE
# Never try to directly create an instance of this class, or modify its member variables.
# Instead, you should only be reading its variables and calling its functions.
from games.anarchy.game_object import GameObject
# <<-- Creer-Merge: imports -->> - Code you add between this comment and the end comment will be preserved between Creer re-runs.
# you can add additional import(s) here
# <<-- /Creer-Merge: imports -->>
class Forecast(GameObject):
"""The class representing the Forecast in the Anarchy game.
The weather effect that will be applied at the end of a turn, which causes fires to spread.
"""
def __init__(self):
"""Initializes a Forecast with basic logic as provided by the Creer code generator."""
GameObject.__init__(self)
# private attributes to hold the properties so they appear read only
self._controlling_player = None
self._direction = ""
self._intensity = 0
@property
def controlling_player(self):
"""The Player that can use WeatherStations to control this Forecast when its the nextForecast.
:rtype: games.anarchy.player.Player
"""
return self._controlling_player
@property
def direction(self):
"""The direction the wind will blow fires in. Can be 'north', 'east', 'south', or 'west'.
:rtype: str
"""
return self._direction
@property
def intensity(self):
"""How much of a Building's fire that can be blown in the direction of this Forecast. Fire is duplicated (copied), not moved (transfered).
:rtype: int
"""
return self._intensity
# <<-- Creer-Merge: functions -->> - Code you add between this comment and the end comment will be preserved between Creer re-runs.
# if you want to add any client side logic (such as state checking functions) this is where you can add them
# <<-- /Creer-Merge: functions -->>
| mit | 66,524,175,843,730,740 | 37.407407 | 146 | 0.671167 | false |
hhucn/git-vote | git-vote/__main__.py | 1 | 3022 | import argparse
import collections
import re
import subprocess
NOTES_REF = 'refs/notes/votes'
Vote = collections.namedtuple('Vote', ['commit', 'user'])
def vote(args):
assert args.user, 'TODO: determine user automatically'
vote = 'vote:%s' % args.user
subprocess.check_call([
'git', 'notes', '--ref', NOTES_REF, 'append', '--allow-empty', '-m', vote, args.COMMIT],
cwd=args.repo_dir)
# TODO: prevent voting twice as same user
def get_all_votes(repo_dir):
output_bytes = subprocess.check_output([
'git', 'notes', '--ref', NOTES_REF, 'list'],
cwd=repo_dir)
output = output_bytes.decode('utf-8')
for line in output.splitlines():
if not line:
continue
votenote_ref, commit_id = line.split()
# TODO use dulwich or something more efficient here
votenote_bytes = subprocess.check_output(
['git', 'show', votenote_ref],
cwd=repo_dir)
votenote_content = votenote_bytes.decode('utf-8') # TODO ignore invalid votes
for voteline in votenote_content.splitlines():
if not voteline:
continue
m = re.match(r'^vote:(?P<user>[a-z0-9@._]+)$', voteline.strip()) # TODO check re for user spec
if not m:
            print('Skipping malformed vote line %r' % voteline)
continue
user = m.group('user')
yield Vote(commit=commit_id, user=user)
def print_list(args):
all_votes = get_all_votes(args.repo_dir)
all_votes_sorted = sorted(all_votes, key=lambda v: (v.commit, v.user))
for v in all_votes_sorted:
print('%s: +1 from %s' % (v.commit, v.user))
def tally(all_votes):
""" Returns a dict commit id => set of users """
res = collections.defaultdict(set)
for v in all_votes:
res[v.commit].add(v.user)
return res
def print_tally(args):
all_votes = get_all_votes(args.repo_dir)
    for commit, votes in sorted(tally(all_votes).items(), key=lambda kv: (len(kv[1]), kv[0])):
print('%s: %d votes' % (commit, len(votes)))
def print_elect(args):
all_votes = get_all_votes(args.repo_dir)
winner_vcount, winner_commit = max((len(votes), commit) for commit, votes in tally(all_votes).items())
# TODO more algorithms
print('%s won the election with %d votes' % (winner_commit, winner_vcount))
def main():
parser = argparse.ArgumentParser('Vote on git commands')
parser.add_argument('-r', '--repo-dir', metavar='DIR', help='root directory of the repository to modify')
subparsers = parser.add_subparsers(dest='cmd')
vote_parser = subparsers.add_parser('vote', help='Vote for commit')
vote_parser.add_argument('--user', metavar='USER_ID', help='ID of the user to vote as')
vote_parser.add_argument('COMMIT', help='reference to the commit to vote for')
subparsers.add_parser('list', help='List all votes')
subparsers.add_parser('tally', help='Tally all votes')
subparsers.add_parser('elect', help='Elect a commit')
args = parser.parse_args()
if args.cmd == 'vote':
vote(args)
elif args.cmd == 'list':
print_list(args)
elif args.cmd == 'tally':
print_tally(args)
elif args.cmd == 'elect':
print_elect(args)
else:
parser.print_help()
if __name__ == '__main__':
main()
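
# Example session (repository path, user id, and commit ref are hypothetical):
#   python __main__.py --repo-dir /path/to/repo vote --user alice@example.org HEAD
#   python __main__.py --repo-dir /path/to/repo list
#   python __main__.py --repo-dir /path/to/repo tally
#   python __main__.py --repo-dir /path/to/repo elect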
| apache-2.0 | 2,600,720,082,252,724,000 | 28.627451 | 106 | 0.676704 | false |
DigitalCampus/django-nurhi-oppia | oppia/tests/av/test_permissions.py | 1 | 2347 | from django.urls import reverse
from django.test import TestCase
from oppia.tests.utils import *
class PermissionsViewTest(TestCase):
fixtures = ['user.json', 'oppia.json', 'quiz.json', 'permissions.json']
def setUp(self):
super(PermissionsViewTest, self).setUp()
self.login_url = reverse('profile_login')
self.admin_user = {
'user': 'admin',
'password': 'password'
}
self.staff_user = {
'user': 'staff',
'password': 'password'
}
self.normal_user = {
'user': 'demo',
'password': 'password'
}
self.teacher_user = {
'user': 'teacher',
'password': 'password'
}
def get_view(self, route, user=None):
if user is not None:
self.client.login(username=user['user'], password=user['password'])
return self.client.get(route)
def assert_response(self, view, status_code, user=None, view_kwargs=None):
route = reverse(view, kwargs=view_kwargs)
res = self.get_view(route, user)
self.assertEqual(res.status_code, status_code)
return res
def assert_can_view(self, view, user=None, view_kwargs=None):
return self.assert_response(view, 200, user, view_kwargs)
def assert_cannot_view(self, view, user=None, view_kwargs=None):
return self.assert_response(view, 401, user, view_kwargs)
def assert_unauthorized(self, view, user=None, view_kwargs=None):
return self.assert_response(view, 403, user, view_kwargs)
def assert_must_login(self, view, user=None, view_kwargs=None):
route = reverse(view, kwargs=view_kwargs)
res = self.get_view(route, user)
login_url = self.login_url + '?next=' + route
self.assertRedirects(res, login_url)
return res
############ upload media file #############
def test_anon_cantview_av_upload(self):
self.assert_must_login('oppia_av_upload')
def test_admin_canview_av_upload(self):
self.assert_can_view('oppia_av_upload', self.admin_user)
def test_staff_canview_av_upload(self):
self.assert_can_view('oppia_av_upload', self.staff_user)
def test_student_cantview_av_upload(self):
self.assert_unauthorized('oppia_av_upload', self.normal_user)
| gpl-3.0 | -4,483,428,020,845,946,400 | 33.514706 | 79 | 0.608862 | false |
kerimlcr/ab2017-dpyo | ornek/lollypop/lollypop-0.9.229/src/web.py | 1 | 7411 | # Copyright (c) 2014-2016 Cedric Bellegarde <cedric.bellegarde@adishatz.org>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import GObject, GLib
from threading import Thread
from time import time
from lollypop.sqlcursor import SqlCursor
from lollypop.tagreader import TagReader
from lollypop.web_youtube import WebYouTube
from lollypop.web_jgm90 import WebJmg90
from lollypop.define import Lp, DbPersistent, Type
from lollypop.lio import Lio
class Web(GObject.Object):
"""
Web helper
"""
__gsignals__ = {
'saved': (GObject.SignalFlags.RUN_FIRST, None, (int,)),
'progress': (GObject.SignalFlags.RUN_FIRST, None, (float,))
}
    @staticmethod
    def play_track(track, play, callback):
"""
Play track
@param track as Track
@param play as bool
@param callback as func(uri: str, track: Track, play: bool)
"""
if track.is_jgm:
uri = WebJmg90.get_uri_content(track.uri)
elif track.is_youtube:
uri = WebYouTube.get_uri_content(track.uri)
else:
return
GLib.idle_add(callback, uri, track, play)
def __init__(self):
"""
Init helper
"""
GObject.Object.__init__(self)
self.__helpers = [WebJmg90(), WebYouTube()]
def save_track(self, item, persistent):
"""
Save item into collection as track
@param item as SearchItem
@param persistent as DbPersistent
"""
t = Thread(target=self.__save_track_thread, args=(item, persistent))
t.daemon = True
t.start()
def save_album(self, item, persistent):
"""
Save item into collection as album
@param item as SearchItem
@param persistent as DbPersistent
"""
t = Thread(target=self.__save_album_thread,
args=(item, persistent))
t.daemon = True
t.start()
#######################
# PRIVATE #
#######################
def __save_album_thread(self, item, persistent):
"""
Save item into collection as album
@param item as SearchItem
@param persistent as DbPersistent
"""
nb_items = len(item.subitems)
# Should not happen but happen :-/
if nb_items == 0:
return
start = 0
album_artist = item.subitems[0].artists[0]
album_id = None
for track_item in item.subitems:
(album_id, track_id) = self.__save_track(track_item, persistent,
album_artist)
if track_id is None:
continue
# Download cover
if start == 0:
t = Thread(target=self.__save_cover, args=(item, album_id))
t.daemon = True
t.start()
start += 1
GLib.idle_add(self.emit, "progress", start / nb_items)
GLib.idle_add(self.emit, "progress", 1)
if Lp().settings.get_value('artist-artwork'):
Lp().art.cache_artists_info()
if album_id is not None:
GLib.idle_add(self.emit, "saved", album_id)
def __save_track_thread(self, item, persistent):
"""
Save item into collection as track
@param item as SearchItem
@param persistent as DbPersistent
"""
album_artist = item.artists[0]
(album_id, track_id) = self.__save_track(item, persistent,
album_artist)
if track_id is None:
return
self.__save_cover(item, album_id)
if Lp().settings.get_value('artist-artwork'):
Lp().art.cache_artists_info()
GLib.idle_add(self.emit, "saved", track_id)
def __save_track(self, item, persistent, album_artist):
"""
Save item into collection as track
@param item as SearchItem
@param persistent as DbPersistent
@param album artist as str
@return (album id as int, track id as int)
"""
# Get uri from helpers
for helper in self.__helpers:
uri = helper.get_uri(item)
if uri:
break
# Don't found anything
if not uri:
return (None, None)
track_id = Lp().tracks.get_id_by_uri(uri)
# Check if track needs to be updated
if track_id is not None:
if Lp().tracks.get_persistent(track_id) == DbPersistent.NONE\
and persistent == DbPersistent.EXTERNAL:
Lp().tracks.set_persistent(track_id, DbPersistent.EXTERNAL)
return (None, None)
t = TagReader()
with SqlCursor(Lp().db) as sql:
# Happen often with Itunes/Spotify
if album_artist not in item.artists:
item.artists.append(album_artist)
artists = "; ".join(item.artists)
artist_ids = t.add_artists(artists, album_artist, "")
album_artist_ids = t.add_album_artists(album_artist, "")
(album_id, new_album) = t.add_album(item.album,
album_artist_ids, "",
False, 0, 0, int(time()), True)
# FIXME: Check this, could move this in add_album()
if new_album:
Lp().albums.set_synced(album_id, Type.NONE)
if persistent == DbPersistent.CHARTS:
genre_ids = [Type.CHARTS]
new_artist_ids = []
else:
new_artist_ids = list(set(artist_ids) | set(album_artist_ids))
genre_ids = t.add_genres("Web", album_id)
# Add track to db
track_id = Lp().tracks.add(item.name, uri, item.duration,
0, item.discnumber, "", album_id,
item.year, 0, 0, 0, persistent)
t.update_track(track_id, artist_ids, genre_ids)
t.update_album(album_id, album_artist_ids, genre_ids, None)
sql.commit()
for genre_id in genre_ids:
GLib.idle_add(Lp().scanner.emit, 'genre-updated', genre_id, True)
for artist_id in new_artist_ids:
GLib.idle_add(Lp().scanner.emit, 'artist-updated', artist_id, True)
return (album_id, track_id)
def __save_cover(self, item, album_id):
"""
Save cover to store
@param item as SearchItem
@param album id as int
"""
f = Lio.File.new_for_uri(item.cover)
(status, data, tag) = f.load_contents(None)
if status:
Lp().art.save_album_artwork(data, album_id)
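
# --- Usage sketch (illustrative; not part of Lollypop itself) ---
# ``search_item`` stands for any SearchItem produced by the search code.
#   web = Web()
#   web.connect("saved", lambda _caller, db_id: print("saved", db_id))
#   web.connect("progress", lambda _caller, fraction: print(fraction))
#   web.save_album(search_item, DbPersistent.EXTERNAL)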
| gpl-3.0 | 2,696,436,820,573,915,000 | 36.619289 | 79 | 0.54716 | false |
pybel/pybel | src/pybel/io/nodelink.py | 1 | 7238 | # -*- coding: utf-8 -*-
"""Conversion functions for BEL graphs with node-link JSON."""
import gzip
import json
from io import BytesIO
from itertools import chain, count
from operator import methodcaller
from typing import Any, Mapping, TextIO, Union
from networkx.utils import open_file
from .utils import ensure_version
from ..constants import (
    ANNOTATIONS, CITATION, FUSION, GRAPH_ANNOTATION_CURIE, GRAPH_ANNOTATION_LIST,
    GRAPH_ANNOTATION_MIRIAM, MEMBERS, PARTNER_3P, PARTNER_5P, PRODUCTS,
    REACTANTS, SOURCE_MODIFIER, TARGET_MODIFIER,
)
from ..dsl import BaseEntity
from ..language import citation_dict
from ..struct import BELGraph
from ..struct.graph import _handle_modifier
from ..tokens import parse_result_to_dsl
from ..utils import hash_edge, tokenize_version
__all__ = [
'to_nodelink',
'to_nodelink_file',
'to_nodelink_gz',
'to_nodelink_jsons',
'from_nodelink',
'from_nodelink_file',
'from_nodelink_gz',
'from_nodelink_jsons',
'to_nodelink_gz_io',
'from_nodelink_gz_io',
]
def to_nodelink(graph: BELGraph) -> Mapping[str, Any]:
"""Convert this graph to a node-link JSON object.
:param graph: BEL Graph
"""
graph_json_dict = _to_nodelink_json_helper(graph)
_prepare_graph_dict(graph_json_dict['graph'])
return graph_json_dict
def _prepare_graph_dict(g):
# Convert annotation list definitions (which are sets) to canonicalized/sorted lists
g[GRAPH_ANNOTATION_LIST] = {
keyword: list(sorted(values))
for keyword, values in g.get(GRAPH_ANNOTATION_LIST, {}).items()
}
    g[GRAPH_ANNOTATION_CURIE] = sorted(g.get(GRAPH_ANNOTATION_CURIE, []))
    g[GRAPH_ANNOTATION_MIRIAM] = sorted(g.get(GRAPH_ANNOTATION_MIRIAM, []))
@open_file(1, mode='w')
def to_nodelink_file(graph: BELGraph, path: Union[str, TextIO], **kwargs) -> None:
"""Write this graph as node-link JSON to a file.
:param graph: A BEL graph
:param path: A path or file-like
"""
graph_json_dict = to_nodelink(graph)
json.dump(graph_json_dict, path, ensure_ascii=False, **kwargs)
def to_nodelink_gz(graph, path: str, **kwargs) -> None:
"""Write a graph as node-link JSON to a gzip file."""
with gzip.open(path, 'wt') as file:
json.dump(to_nodelink(graph), file, ensure_ascii=False, **kwargs)
def to_nodelink_jsons(graph: BELGraph, **kwargs) -> str:
"""Dump this graph as a node-link JSON object to a string."""
return json.dumps(to_nodelink(graph), ensure_ascii=False, **kwargs)
def from_nodelink(graph_json_dict: Mapping[str, Any], check_version: bool = True) -> BELGraph:
"""Build a graph from node-link JSON Object."""
pybel_version = tokenize_version(graph_json_dict['graph']['pybel_version'])
if pybel_version[1] < 14: # if minor version is less than 14
raise ValueError('Invalid NodeLink JSON from old version of PyBEL (v{}.{}.{})'.format(*pybel_version))
graph = _from_nodelink_json_helper(graph_json_dict)
return ensure_version(graph, check_version=check_version)
@open_file(0, mode='r')
def from_nodelink_file(path: Union[str, TextIO], check_version: bool = True) -> BELGraph:
"""Build a graph from the node-link JSON contained in the given file.
:param path: A path or file-like
"""
return from_nodelink(json.load(path), check_version=check_version)
def from_nodelink_gz(path: str) -> BELGraph:
"""Read a graph as node-link JSON from a gzip file."""
with gzip.open(path, 'rt') as file:
return from_nodelink(json.load(file))
def from_nodelink_jsons(graph_json_str: str, check_version: bool = True) -> BELGraph:
"""Read a BEL graph from a node-link JSON string."""
return from_nodelink(json.loads(graph_json_str), check_version=check_version)
def _to_nodelink_json_helper(graph: BELGraph) -> Mapping[str, Any]:
"""Convert a BEL graph to a node-link format.
:param graph: BEL Graph
Adapted from :func:`networkx.readwrite.json_graph.node_link_data`
"""
nodes = sorted(graph, key=methodcaller('as_bel'))
mapping = dict(zip(nodes, count()))
return {
'directed': True,
'multigraph': True,
'graph': graph.graph.copy(),
'nodes': [
_augment_node(node)
for node in nodes
],
'links': [
dict(
chain(
data.copy().items(),
[('source', mapping[u]), ('target', mapping[v]), ('key', key)],
),
)
for u, v, key, data in graph.edges(keys=True, data=True)
],
}
def _augment_node(node: BaseEntity) -> BaseEntity:
"""Add the SHA-512 identifier to a node's dictionary."""
rv = node.copy()
rv['id'] = node.md5
rv['bel'] = node.as_bel()
for m in chain(node.get(MEMBERS, []), node.get(REACTANTS, []), node.get(PRODUCTS, [])):
m.update(_augment_node(m))
if FUSION in node:
node[FUSION][PARTNER_3P].update(_augment_node(node[FUSION][PARTNER_3P]))
node[FUSION][PARTNER_5P].update(_augment_node(node[FUSION][PARTNER_5P]))
return rv
def _recover_graph_dict(graph: BELGraph):
graph.graph[GRAPH_ANNOTATION_LIST] = {
keyword: set(values)
for keyword, values in graph.graph.get(GRAPH_ANNOTATION_LIST, {}).items()
}
graph.graph[GRAPH_ANNOTATION_CURIE] = set(graph.graph.get(GRAPH_ANNOTATION_CURIE, []))
graph.graph[GRAPH_ANNOTATION_MIRIAM] = set(graph.graph.get(GRAPH_ANNOTATION_MIRIAM, []))
def _from_nodelink_json_helper(data: Mapping[str, Any]) -> BELGraph:
"""Return graph from node-link data format.
Adapted from :func:`networkx.readwrite.json_graph.node_link_graph`
"""
graph = BELGraph()
graph.graph = data.get('graph', {})
_recover_graph_dict(graph)
mapping = []
for node_data in data['nodes']:
node = parse_result_to_dsl(node_data)
graph.add_node_from_data(node)
mapping.append(node)
    for link_data in data['links']:
        u = mapping[link_data['source']]
        v = mapping[link_data['target']]
        edge_data = {
            key: value
            for key, value in link_data.items()
            if key not in {'source', 'target', 'key'}
        }
for side in (SOURCE_MODIFIER, TARGET_MODIFIER):
side_data = edge_data.get(side)
if side_data:
_handle_modifier(side_data)
if CITATION in edge_data:
edge_data[CITATION] = citation_dict(**edge_data[CITATION])
if ANNOTATIONS in edge_data:
edge_data[ANNOTATIONS] = graph._clean_annotations(edge_data[ANNOTATIONS])
graph.add_edge(u, v, key=hash_edge(u, v, edge_data), **edge_data)
return graph
def to_nodelink_gz_io(graph: BELGraph) -> BytesIO:
"""Get a BEL graph as a compressed BytesIO."""
bytes_io = BytesIO()
with gzip.GzipFile(fileobj=bytes_io, mode='w') as file:
s = to_nodelink_jsons(graph)
file.write(s.encode('utf-8'))
bytes_io.seek(0)
return bytes_io
def from_nodelink_gz_io(bytes_io: BytesIO) -> BELGraph:
"""Get BEL from gzipped nodelink JSON."""
with gzip.GzipFile(fileobj=bytes_io, mode='r') as file:
s = file.read()
j = s.decode('utf-8')
return from_nodelink_jsons(j)
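
# Round-trip sketch (assumes an existing BELGraph instance ``graph``):
#
#     payload = to_nodelink_jsons(graph)
#     reloaded = from_nodelink_jsons(payload)
#     assert graph.number_of_nodes() == reloaded.number_of_nodes()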
| mit | 2,647,336,991,651,230,700 | 31.168889 | 115 | 0.635811 | false |
seecr/meresco-examples | meresco/__init__.py | 1 | 1394 | ## begin license ##
#
# "Meresco Examples" is a project demonstrating some of the
# features of various components of the "Meresco Suite".
# Also see http://meresco.org.
#
# Copyright (C) 2007-2008 SURF Foundation. http://www.surf.nl
# Copyright (C) 2007-2010 Seek You Too (CQ2) http://www.cq2.nl
# Copyright (C) 2007-2009 Stichting Kennisnet Ict op school. http://www.kennisnetictopschool.nl
# Copyright (C) 2009 Delft University of Technology http://www.tudelft.nl
# Copyright (C) 2009 Tilburg University http://www.uvt.nl
#
# This file is part of "Meresco Examples"
#
# "Meresco Examples" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# "Meresco Examples" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Meresco Examples"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| gpl-2.0 | -214,853,663,805,945,660 | 42.5625 | 95 | 0.738164 | false |
Mozu/mozu-python-sdk | mozurestsdk/platform/tenantextensions.py | 1 | 2242 |
"""
This code was generated by Codezu.
Changes to this file may cause incorrect behavior and will be lost if
the code is regenerated.
"""
from mozurestsdk.mozuclient import default as default_client
from mozurestsdk.mozuurl import MozuUrl;
from mozurestsdk.urllocation import UrlLocation
from mozurestsdk.apicontext import ApiContext;
class TenantExtensions(object):
def __init__(self, apiContext: ApiContext = None, mozuClient = None):
self.client = mozuClient or default_client();
if (apiContext is not None):
self.client.withApiContext(apiContext);
else:
self.client.withApiContext(ApiContext());
def getExtensions(self,responseFields = None):
""" Retrieves the Arc.js configuration settings for a site.
Args:
| responseFields (string) - Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object. This parameter should only be used to retrieve data. Attempting to update data using this parameter may cause data loss.
Returns:
| TenantExtensions
Raises:
| ApiException
"""
url = MozuUrl("/api/platform/extensions/?responseFields={responseFields}", "GET", UrlLocation.TenantPod, False);
url.formatUrl("responseFields", responseFields);
self.client.withResourceUrl(url).execute();
return self.client.result();
def updateExtensions(self,extensions, responseFields = None):
""" Updates the Arc.js configuration settings for a site.
Args:
| extensions(extensions) - The updated details of the Arc.js configuration settings.
| responseFields (string) - Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object. This parameter should only be used to retrieve data. Attempting to update data using this parameter may cause data loss.
Returns:
| TenantExtensions
Raises:
| ApiException
"""
url = MozuUrl("/api/platform/extensions/?responseFields={responseFields}", "PUT", UrlLocation.TenantPod, False);
url.formatUrl("responseFields", responseFields);
self.client.withResourceUrl(url).withBody(extensions).execute();
return self.client.result();
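
# --- Usage sketch (illustrative; ``my_api_context`` is a placeholder) ---
#   resource = TenantExtensions(apiContext=my_api_context)
#   current = resource.getExtensions()
#   resource.updateExtensions(current)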
| apache-2.0 | -7,380,671,656,172,497,000 | 32.181818 | 266 | 0.727029 | false |
nigelb/SerialGrabber | examples/MQTT/SerialGrabber_Settings.py | 1 | 2043 | #!/usr/bin/env python
# SerialGrabber reads data from a serial port and processes it with the
# configured processor.
# Copyright (C) 2012 NigelB
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import serial
from serial_grabber.extractors import TransactionExtractor
from serial_grabber.reader.SerialReader import SerialReader
from serial_grabber.processor.UploadProcessor import UploadProcessor
from serial_grabber.processor import CompositeProcessor
from serial_grabber.mqtt import MqttCommander
from serial_grabber.connections import SerialConnection
# Serial Settings
timeout = 1
port = "/dev/ttyUSB0"
baud = 57600
parity = serial.PARITY_NONE
stop_bits = 1
# MQTT settings
mqtt_host = "localhost"
mqtt_port = 1883
mqtt_auth = ('system', 'manager')
# Settings
cache_collision_avoidance_delay = 1
processor_sleep = 1
watchdog_sleep = 1
reader_error_sleep = 1
drop_carriage_return = True
transaction = TransactionExtractor("default", "BEGIN DATA", "END DATA")
reader = SerialReader(transaction,
1000,
SerialConnection(port, baud, timeout=timeout,
parity=parity, stop_bits=stop_bits))
commander = MqttCommander(mqtt_host, mqtt_port, mqtt_auth)
uploadProcessor = UploadProcessor("https://example.org/cgi-bin/upload.py")
processor = CompositeProcessor([commander.processor, uploadProcessor])
| gpl-2.0 | 1,560,382,958,900,706,000 | 33.05 | 75 | 0.751836 | false |
lqmanh/daethon | test_daethon.py | 1 | 1817 | import os
import sys
import time
import pytest
from daethon import Daemon
class TDaemon(Daemon):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
with open('testing_daemon', 'w') as f:
f.write('inited')
def run(self):
time.sleep(1)
with open('testing_daemon', 'w') as f:
f.write('finished')
def file_contains(path, s):
with open(path) as f:
return f.read() == s
def control_daemon(action):
os.system(' '.join((sys.executable, __file__, action)))
@pytest.fixture
def context():
control_daemon('start')
time.sleep(0.5)
yield
if os.path.exists('testing_daemon.pid'):
control_daemon('stop')
time.sleep(0.5)
os.system('rm testing_daemon*') # clean up files if necessary
def test_daemon_can_start(context):
assert os.path.exists('testing_daemon.pid')
assert file_contains('testing_daemon', 'inited')
def test_daemon_can_stop(context):
control_daemon('stop')
time.sleep(0.5)
assert not os.path.exists('testing_daemon.pid')
assert file_contains('testing_daemon', 'inited')
def test_daemon_can_finish(context):
time.sleep(1)
assert not os.path.exists('testing_daemon.pid')
assert file_contains('testing_daemon', 'finished')
def test_daemon_can_restart(context):
with open('testing_daemon.pid') as f:
pid1 = f.read()
time.sleep(0.5)
control_daemon('restart')
time.sleep(0.5)
with open('testing_daemon.pid') as f:
pid2 = f.read()
assert pid1 != pid2
if __name__ == '__main__':
if len(sys.argv) == 2:
arg = sys.argv[1]
if arg in ('start', 'stop', 'restart'):
d = TDaemon('testing_daemon.pid', verbose=0)
getattr(d, arg)()
else:
pytest.main()
| apache-2.0 | -3,047,666,419,461,129,700 | 22.597403 | 66 | 0.603192 | false |
msteinhoff/foption-bot | src/python/core/messages.py | 1 | 2244 | # -*- coding: UTF-8 -*-
"""
$Id$
$URL$
Copyright (c) 2010 foption
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@since Jan 6, 2011
@author Mario Steinhoff
This file contains all messages with associated message numbers that are
used through the whole project. When defining new messages, please use
named parameters wherever possible.
Currently, the following number ranges are defined:
00000-09999: Core
00001-00999: Bot
01000-01999: Config
10000-19999: Modules
20000-29999: Interaction
20001-20200: IRC
"""
__version__ = '$Rev$'
__all__ = [
'message'
]
message = {}
message[1000] = 'configuration saved'
message[1001] = 'unable to save configuration'
message[1002] = 'configuration loaded'
message[1003] = 'unable to load configuration: no config file was found'
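# Example lookup (message numbers are plain decimal integers):
#   text = message[1000]  # -> 'configuration saved'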
message[20001] = ''
message[20002] = ''
message[20003] = ''
message[20005] = ''
message[20006] = ''
message[20007] = ''
message[20008] = ''
message[20009] = ''
message[20010] = ''
message[20011] = ''
message[20012] = ''
message[20013] = ''
message[20014] = ''
message[20015] = ''
message[20016] = ''
#reply.add('deine mutter hat gefailed.')
#return "OHFUCKOHFUCKOHFUCK Etwas lief schief! Datenbankfehler"
#return "Error 555!"
#reply.add('Deine Mutter hat die Datenbank gefressen')
| mit | 6,695,913,913,734,353,000 | 27.769231 | 77 | 0.750446 | false |
logicabrity/aeon | test/test_measurement.py | 1 | 1229 | import time
import pytest
from aeon.measurement import Measurement
from aeon.errors import InvalidMeasurementState
def test_cant_start_measurement_twice():
m = Measurement("name", "group")
m.start()
with pytest.raises(InvalidMeasurementState):
m.start()
def test_cant_stop_measurement_before_starting_it():
m = Measurement("name", "group")
with pytest.raises(InvalidMeasurementState):
m.stop()
def test_cant_stop_measurement_twice():
m = Measurement("name", "group")
m.start()
m.stop()
with pytest.raises(InvalidMeasurementState):
m.stop()
def test_starting_measurement_increases_number_of_calls():
m = Measurement("name", "group")
assert m.calls == 0
m.start()
assert m.calls == 1
def test_measurement_measures_something():
m = Measurement("name", "group")
m.start()
time.sleep(1e-3)
m.stop()
elapsed = m.total_runtime
assert elapsed > 0
m.start()
time.sleep(1e-3)
m.stop()
elapsed_again = m.total_runtime
assert elapsed_again > elapsed
@pytest.mark.fixed
def test_measurement_has_name_and_group():
m = Measurement("name", "group")
assert m.name == "name"
assert m.group == "group"
| mit | -1,662,716,423,659,156,000 | 21.345455 | 58 | 0.656631 | false |
maas/maas | src/maasserver/middleware.py | 1 | 18463 | # Copyright 2012-2016 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Access middleware."""
import http.client
import json
import logging
from pprint import pformat
import sys
import traceback
import attr
from crochet import TimeoutError
from django.conf import settings
from django.core.exceptions import PermissionDenied, ValidationError
from django.core.handlers.exception import get_exception_response
from django.http import (
Http404,
HttpResponse,
HttpResponseBadRequest,
HttpResponseForbidden,
HttpResponseRedirect,
)
from django.urls import get_resolver, get_urlconf, reverse
from django.utils.encoding import force_str
from django.utils.http import urlquote_plus
from maasserver import logger
from maasserver.clusterrpc.utils import get_error_message_for_exception
from maasserver.components import (
discard_persistent_error,
register_persistent_error,
)
from maasserver.enum import COMPONENT
from maasserver.exceptions import MAASAPIException
from maasserver.models.config import Config
from maasserver.models.node import RackController
from maasserver.rbac import rbac
from maasserver.rpc import getAllClients
from maasserver.utils.orm import is_retryable_failure
from provisioningserver.rpc.exceptions import (
NoConnectionsAvailable,
PowerActionAlreadyInProgress,
)
from provisioningserver.utils.shell import ExternalProcessError
# 'Retry-After' header sent for httplib.SERVICE_UNAVAILABLE
# responses.
RETRY_AFTER_SERVICE_UNAVAILABLE = 10
PUBLIC_URL_PREFIXES = [
# Login page: must be visible to anonymous users.
reverse("login"),
# Authentication: must be visible to anonymous users.
reverse("authenticate"),
reverse("discharge-request"),
# CSRF: only usable by logged in users, but returns FORBIDDEN instead of
# a redirect to the login page on request of an unauthenticated user.
reverse("csrf"),
# The combo loaders are publicly accessible.
reverse("robots"),
# Metadata service is for use by nodes; no login.
reverse("metadata"),
# RPC information is for use by rack controllers; no login.
reverse("rpc-info"),
# Prometheus metrics with usage stats
reverse("metrics"),
# API meta-information is publicly visible.
reverse("api_version"),
reverse("api_v1_error"),
# API calls are protected by piston.
settings.API_URL_PREFIX,
# Boot resources simple streams endpoint; no login.
settings.SIMPLESTREAMS_URL_PREFIX,
]
def is_public_path(path):
"""Whether a request.path is publicly accessible."""
return any(path.startswith(prefix) for prefix in PUBLIC_URL_PREFIXES)
class AccessMiddleware:
"""Protect access to views.
Most UI views are visible only to logged-in users, but there are pages
that are accessible to anonymous users (e.g. the login page!) or that
use other authentication (e.g. the MAAS API, which is managed through
piston).
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
if is_public_path(request.path):
return self.get_response(request)
if request.user.is_anonymous:
return HttpResponseRedirect(
"/MAAS/?next=%s" % urlquote_plus(request.path)
)
return self.get_response(request)
class ExternalComponentsMiddleware:
"""Middleware to check external components at regular intervals."""
def __init__(self, get_response):
self.get_response = get_response
def _check_rack_controller_connectivity(self):
"""Check each rack controller to see if it's connected.
If any rack controllers are disconnected, add a persistent error.
"""
controllers = RackController.objects.all()
connected_ids = {client.ident for client in getAllClients()}
disconnected_controllers = {
controller
for controller in controllers
if controller.system_id not in connected_ids
}
if len(disconnected_controllers) == 0:
discard_persistent_error(COMPONENT.RACK_CONTROLLERS)
else:
if len(disconnected_controllers) == 1:
message = (
"One rack controller is not yet connected to the region"
)
else:
message = (
"%d rack controllers are not yet connected to the region"
% len(disconnected_controllers)
)
message = (
'%s. Visit the <a href="/MAAS/l/controllers">'
"rack controllers page</a> for "
"more information." % message
)
register_persistent_error(COMPONENT.RACK_CONTROLLERS, message)
def __call__(self, request):
# This middleware hijacks the request to perform checks. Any
# error raised during these checks should be caught to avoid
# disturbing the handling of the request. Proper error reporting
# should be handled in the check method itself.
self._check_rack_controller_connectivity()
return self.get_response(request)
class ExceptionMiddleware:
"""Convert exceptions into appropriate HttpResponse responses.
For example, a MAASAPINotFound exception processed by a middleware
based on this class will result in an http 404 response to the client.
Validation errors become "bad request" responses.
    .. middleware: https://docs.djangoproject.com/en/dev/topics/http/middleware/
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
try:
return self.get_response(request)
except Exception as exception:
response = self.process_exception(request, exception)
if response:
return response
else:
raise
def process_exception(self, request, exception):
encoding = "utf-8"
if isinstance(exception, MAASAPIException):
# Print a traceback if this is a 500 error.
if (
settings.DEBUG
or exception.api_error == http.client.INTERNAL_SERVER_ERROR
):
self.log_exception(exception)
# This type of exception knows how to translate itself into
# an http response.
return exception.make_http_response()
elif isinstance(exception, ValidationError):
if settings.DEBUG:
self.log_exception(exception)
if hasattr(exception, "message_dict"):
# Complex validation error with multiple fields:
# return a json version of the message_dict.
return HttpResponseBadRequest(
json.dumps(exception.message_dict),
content_type="application/json",
)
else:
# Simple validation error: return the error message.
return HttpResponseBadRequest(
str("".join(exception.messages)).encode(encoding),
content_type="text/plain; charset=%s" % encoding,
)
elif isinstance(exception, PermissionDenied):
if settings.DEBUG:
self.log_exception(exception)
return HttpResponseForbidden(
content=str(exception).encode(encoding),
content_type="text/plain; charset=%s" % encoding,
)
elif isinstance(exception, ExternalProcessError):
# Catch problems interacting with processes that the
# appserver spawns, e.g. rndc.
#
# While this is a serious error, it should be a temporary
# one as the admin should be checking and fixing, or it
# could be spurious. There's no way of knowing, so the best
# course of action is to ask the caller to repeat.
if settings.DEBUG:
self.log_exception(exception)
response = HttpResponse(
content=str(exception).encode(encoding),
status=int(http.client.SERVICE_UNAVAILABLE),
content_type="text/plain; charset=%s" % encoding,
)
response["Retry-After"] = RETRY_AFTER_SERVICE_UNAVAILABLE
return response
elif isinstance(exception, Http404):
if settings.DEBUG:
self.log_exception(exception)
return get_exception_response(
request, get_resolver(get_urlconf()), 404, exception
)
elif is_retryable_failure(exception):
# We never handle retryable failures.
return None
elif isinstance(exception, SystemExit):
return None
else:
# Print a traceback.
self.log_exception(exception)
# Return an API-readable "Internal Server Error" response.
return HttpResponse(
content=str(exception).encode(encoding),
status=int(http.client.INTERNAL_SERVER_ERROR),
content_type="text/plain; charset=%s" % encoding,
)
def log_exception(self, exception):
exc_info = sys.exc_info()
logger.error(" Exception: %s ".center(79, "#") % str(exception))
logger.error("".join(traceback.format_exception(*exc_info)))
class DebuggingLoggerMiddleware:
log_level = logging.DEBUG
def __init__(self, get_response):
self.get_response = get_response
# Taken straight out of Django 1.8 django.http.request module to improve
# our debug output on requests (dropped in Django 1.9).
@classmethod
def _build_request_repr(
self,
request,
path_override=None,
GET_override=None,
POST_override=None,
COOKIES_override=None,
META_override=None,
):
"""
Builds and returns the request's representation string. The request's
attributes may be overridden by pre-processed values.
"""
# Since this is called as part of error handling, we need to be very
# robust against potentially malformed input.
try:
get = (
pformat(GET_override)
if GET_override is not None
else pformat(request.GET)
)
except Exception:
get = "<could not parse>"
try:
post = (
pformat(POST_override)
if POST_override is not None
else pformat(request.POST)
)
except Exception:
post = "<could not parse>"
try:
cookies = (
pformat(COOKIES_override)
if COOKIES_override is not None
else pformat(request.COOKIES)
)
except Exception:
cookies = "<could not parse>"
try:
meta = (
pformat(META_override)
if META_override is not None
else pformat(request.META)
)
except Exception:
meta = "<could not parse>"
path = path_override if path_override is not None else request.path
name = request.__class__.__name__
return force_str(
f"<{name}\npath:{path},\nGET:{get},\nPOST:{post},\nCOOKIES:{cookies},\nMETA:{meta}>"
)
def __call__(self, request):
if settings.DEBUG_HTTP and logger.isEnabledFor(self.log_level):
header = " Request dump ".center(79, "#")
logger.log(
self.log_level,
"%s\n%s",
header,
self._build_request_repr(request),
)
response = self.get_response(request)
if settings.DEBUG_HTTP and logger.isEnabledFor(self.log_level):
header = " Response dump ".center(79, "#")
content = getattr(response, "content", "{no content}")
try:
decoded_content = content.decode("utf-8")
except UnicodeDecodeError:
logger.log(
self.log_level,
"%s\n%s",
header,
"** non-utf-8 (binary?) content **",
)
else:
logger.log(self.log_level, "%s\n%s", header, decoded_content)
return response
class RPCErrorsMiddleware:
"""A middleware for handling RPC errors."""
handled_exceptions = (
NoConnectionsAvailable,
PowerActionAlreadyInProgress,
TimeoutError,
)
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
try:
return self.get_response(request)
except Exception as exception:
response = self.process_exception(request, exception)
if response:
return response
else:
raise
def process_exception(self, request, exception):
if request.path.startswith(settings.API_URL_PREFIX):
# Not a path we're handling exceptions for.
# APIRPCErrorsMiddleware handles all the API request RPC
# errors.
return None
if not isinstance(exception, self.handled_exceptions):
# Nothing to do, since we don't care about anything other
# than handled_exceptions.
return None
logging.exception(exception)
return HttpResponseRedirect(request.path)
class APIRPCErrorsMiddleware(RPCErrorsMiddleware):
"""A middleware for handling RPC errors in API requests."""
handled_exceptions = {
NoConnectionsAvailable: int(http.client.SERVICE_UNAVAILABLE),
PowerActionAlreadyInProgress: int(http.client.SERVICE_UNAVAILABLE),
TimeoutError: int(http.client.GATEWAY_TIMEOUT),
}
def process_exception(self, request, exception):
if not request.path.startswith(settings.API_URL_PREFIX):
# Not a path we're handling exceptions for.
# RPCErrorsMiddleware handles non-API requests.
return None
if exception.__class__ not in self.handled_exceptions:
# This isn't something we handle; allow processing to
# continue.
return None
status = self.handled_exceptions[exception.__class__]
logging.exception(exception)
error_message = get_error_message_for_exception(exception)
encoding = "utf-8"
response = HttpResponse(
content=error_message.encode(encoding),
status=status,
content_type="text/plain; charset=%s" % encoding,
)
if status == http.client.SERVICE_UNAVAILABLE:
response["Retry-After"] = RETRY_AFTER_SERVICE_UNAVAILABLE
return response
class CSRFHelperMiddleware:
"""A Middleware to decide whether a request needs to be protected against
CSRF attacks.
Requests with a session cookie (i.e. requests for which the basic
    session-based Django authentication is used) will be CSRF protected.
Requests without this cookie are pure 0-legged API requests and thus don't
need to use the CSRF protection machinery because each request is signed.
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
session_cookie = request.COOKIES.get(
settings.SESSION_COOKIE_NAME, None
)
if session_cookie is None:
# csrf_processing_done is a field used by Django. We use it here
# to bypass the CSRF protection when it's not needed (i.e. when the
# request is OAuth-authenticated).
request.csrf_processing_done = True
return self.get_response(request)
@attr.s
class ExternalAuthInfo:
"""Hold information about external authentication."""
type = attr.ib()
url = attr.ib()
domain = attr.ib(default="")
admin_group = attr.ib(default="")
class ExternalAuthInfoMiddleware:
"""A Middleware adding information about the external authentication.
This adds an `external_auth_info` attribute to the request, which is an
ExternalAuthInfo instance if external authentication is enabled, None
otherwise.
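    Example (hypothetical URL): with ``rbac_url = "https://rbac.example"``
    the request gets ``ExternalAuthInfo(type="rbac",
    url="https://rbac.example/auth", domain="", admin_group="")``.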
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
configs = Config.objects.get_configs(
[
"external_auth_url",
"external_auth_domain",
"external_auth_admin_group",
"rbac_url",
]
)
rbac_endpoint = configs.get("rbac_url")
candid_endpoint = configs.get("external_auth_url")
auth_endpoint, auth_domain, auth_admin_group = "", "", ""
if rbac_endpoint:
auth_type = "rbac"
auth_endpoint = rbac_endpoint.rstrip("/") + "/auth"
elif candid_endpoint:
auth_type = "candid"
auth_endpoint = candid_endpoint
auth_domain = configs.get("external_auth_domain")
auth_admin_group = configs.get("external_auth_admin_group")
auth_info = None
if auth_endpoint:
# strip trailing slashes as js-bakery ends up using double slashes
# in the URL otherwise
auth_info = ExternalAuthInfo(
type=auth_type,
url=auth_endpoint.rstrip("/"),
domain=auth_domain,
admin_group=auth_admin_group,
)
request.external_auth_info = auth_info
return self.get_response(request)
class RBACMiddleware:
"""Middleware that cleans the RBAC thread-local cache.
At the end of each request the RBAC client that is held in the thread-local
needs to be cleaned up. That way the next request on the same thread will
use a new RBAC client.
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
result = self.get_response(request)
# Now that the response has been handled, clear the thread-local
# state of the RBAC connection.
rbac.clear()
return result
| agpl-3.0 | -8,170,894,622,841,870,000 | 34.505769 | 96 | 0.611222 | false |
ivanamihalek/tcga | icgc/60_nextgen_production/65_reactome_tree.py | 1 | 5057 | #! /usr/bin/python3
#
# This source code is part of icgc, an ICGC processing pipeline.
#
# Icgc is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Icgc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see<http://www.gnu.org/licenses/>.
#
# Contact: ivana.mihalek@gmail.com
#
# some pathways do not have the associated genes listed, probably by mistake
# examples:
# R-HSA-1483171 | Synthesis of BMP
# R-HSA-2408499 | Formation of selenosugars for excretion
from icgc_utils.common_queries import quotify
from icgc_utils.reactome import *
from config import Config
############
def print_genes(cursor, gene_ids, depth):
if len(gene_ids)<1:
print("\t"*depth, "no genes listed")
return
#print("\t"*depth, "print genes here")
gene_id_string = ",".join([quotify(z) for z in gene_ids])
qry = "select ensembl_gene_id, approved_name from hgnc where ensembl_gene_id in (%s)" % gene_id_string
gene_names = dict(hard_landing_search(cursor, qry))
qry = "select ensembl_gene_id, approved_symbol from hgnc where ensembl_gene_id in (%s)" % gene_id_string
gene_symbols = dict(hard_landing_search(cursor, qry))
for gene in gene_ids:
print("\t"*depth, gene_symbols.get(gene,""), gene_names.get(gene,""))
return
##############
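# Recursively walk the pathway graph below pthwy_id: any child pathway with
# fewer than 100 associated genes (or with no further subdivisions) is
# recorded in gene_groups under its pathway name.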
def characterize_subtree(cursor, graph, pthwy_id, gene_groups, depth, verbose=True):
# this is the whole subtree
# children = [node for node in nx.dfs_preorder_nodes(graph, pthwy_id)]
# A successor of n is a node m such that there exists a directed edge from n to m.
children = [node for node in graph.successors(pthwy_id)]
if len(children)==0: return False
node_id_string = ",".join([quotify(z) for z in children])
qry_template = "select * from reactome_pathways where reactome_pathway_id in (%s)"
children_names = hard_landing_search(cursor, qry_template % node_id_string)
for child_id, child_name in children_names:
# number_of_genes = genes related to nodes without descendants
genes = genes_in_subgraph(cursor, graph, child_id)
if verbose: print("\t"*depth, child_id, child_name, len(genes))
if len(genes)<100:
if verbose: print_genes(cursor, genes, depth+1)
gene_groups[child_name] = genes
continue
if not characterize_subtree(cursor, graph, child_id, gene_groups, depth+1, verbose=verbose): # no further subdivisions
if verbose: print_genes(cursor, genes, depth+1)
gene_groups[child_name] = genes
continue
return True
#########################################
import numpy as np
from matplotlib import pyplot as plt
def hist_plot(gene_groups):
data = [len(gene_list) for gene_list in list(gene_groups.values())]
# fixed bin size
bins = np.arange(0, 505, 5) # fixed bin size
plt.xlim(0,500)
plt.hist(data, bins=bins, alpha=0.5)
# plt.title('')
plt.xlabel('number of genes in group (bin size = 5)')
plt.ylabel('number of groups')
#
plt.show()
####################################################
def main():
verbose = False
db = connect_to_mysql(Config.mysql_conf_file)
cursor = db.cursor()
switch_to_db(cursor, 'icgc')
# are there children with multiple parents? Yes. So I need some kind of
	# directed graph, rather than a tree.
qry = "select child, count(distinct parent) as ct from reactome_hierarchy "
qry += "group by child having ct>1"
ret = search_db(cursor, qry)
print("number of children with multiple parents:", len(ret))
# feed the parent/child pairs as edges into graph
graph = build_reactome_graph(cursor, verbose=True)
# candidate roots
zero_in_degee_nodes = get_roots(graph)
node_id_string = ",".join([quotify(z) for z in zero_in_degee_nodes])
qry_template = "select * from reactome_pathways where reactome_pathway_id in (%s)"
root_names = hard_landing_search(cursor, qry_template% node_id_string)
gene_groups = {}
for pthwy_id, name in root_names:
if "disease" in name.lower(): continue
if verbose: print(pthwy_id, name)
characterize_subtree(cursor, graph, pthwy_id, gene_groups, 1, verbose=verbose)
print("\n===========================")
max_group=0
for group, genes in gene_groups.items():
groupsize = len(genes)
if max_group< groupsize: max_group=groupsize
print (group, len(genes))
print("\n===========================")
print("number of groups", len(gene_groups))
print("largest group", max_group)
print("\n===========================")
for pthwy_name, genes in gene_groups.items():
if len(genes)<=150: continue
print("\n",pthwy_name, len(genes))
#print_genes(cursor, genes, 1)
#hist_plot(gene_groups)
cursor.close()
db.close()
#########################################
if __name__ == '__main__':
main()
| gpl-3.0 | -438,034,926,294,668,700 | 35.121429 | 120 | 0.677477 | false |
soccermetrics/marcotti-mls | marcottimls/models/financial.py | 1 | 5658 | from sqlalchemy import Column, Integer, String, Sequence, ForeignKey, ForeignKeyConstraint, Boolean
from sqlalchemy.orm import relationship, backref
from sqlalchemy.schema import CheckConstraint
import enums
from common import BaseSchema
class AcquisitionPaths(BaseSchema):
"""
MLS player acquisition data model.
Captures **initial** entry path into league.
"""
__tablename__ = 'acquisitions'
player_id = Column(Integer, ForeignKey('players.id'), primary_key=True)
year_id = Column(Integer, ForeignKey('years.id'), primary_key=True)
path = Column(enums.AcquisitionType.db_type())
discriminator = Column('type', String(20))
club_id = Column(Integer, ForeignKey('clubs.id'))
club = relationship('Clubs', backref=backref('acquisitions'))
player = relationship('Players', backref=backref('entry'))
year = relationship('Years', backref=backref('acquisitions'))
__mapper_args__ = {
'polymorphic_identity': 'acquisitions',
'polymorphic_on': discriminator
}
class PlayerDrafts(AcquisitionPaths):
"""
Player draft data model.
"""
__mapper_args__ = {'polymorphic_identity': 'draft'}
round = Column(Integer, CheckConstraint('round > 0'))
selection = Column(Integer, CheckConstraint('selection > 0'))
gen_adidas = Column(Boolean, default=False)
def __repr__(self):
return u"<PlayerDraft(name={0}, year={1}, round={2}, selection={3}, generation_adidas={4})>".format(
self.player.full_name, self.year.yr, self.round, self.selection, self.gen_adidas).encode('utf-8')
def __unicode__(self):
return u"<PlayerDraft(name={0}, year={1}, round={2}, selection={3}, generation_adidas={4})>".format(
self.player.full_name, self.year.yr, self.round, self.selection, self.gen_adidas)
class PlayerSalaries(BaseSchema):
"""
Player salary data model.
"""
__tablename__ = 'salaries'
__table_args__ = (
ForeignKeyConstraint(
['competition_id', 'season_id'],
['competition_seasons.competition_id', 'competition_seasons.season_id'],
),
)
id = Column(Integer, Sequence('salary_id_seq', start=10000), primary_key=True)
base_salary = Column(Integer, CheckConstraint('base_salary >= 0'), doc="Base salary in cents")
avg_guaranteed = Column(Integer, CheckConstraint('avg_guaranteed >= 0'),
doc="Average annualized guaranteed compensation in cents")
player_id = Column(Integer, ForeignKey('players.id'))
club_id = Column(Integer, ForeignKey('clubs.id'))
competition_id = Column(Integer)
season_id = Column(Integer)
player = relationship('Players', backref=backref('salaries'))
club = relationship('Clubs', backref=backref('payroll'))
comp_season = relationship('CompetitionSeasons', backref=backref('payroll'))
def __repr__(self):
return u"<PlayerSalary(name={0}, club={1}, competition={2}, season={3}, base={4:.2f}, " \
u"guaranteed={5:.2f})>".format(self.player.full_name, self.club.name,
self.comp_season.competition.name, self.comp_season.season.name,
self.base_salary/100.00, self.avg_guaranteed/100.00).encode('utf-8')
def __unicode__(self):
return u"<PlayerSalary(name={0}, club={1}, competition={2}, season={3}, base={4:.2f}, " \
u"guaranteed={5:.2f})>".format(self.player.full_name, self.club.name,
self.comp_season.competition.name, self.comp_season.season.name,
self.base_salary / 100.00, self.avg_guaranteed / 100.00)
class PartialTenures(BaseSchema):
"""
Data model that captures player's partial-season tenure at a club.
"""
__tablename__ = 'partials'
__table_args__ = (
ForeignKeyConstraint(
['competition_id', 'season_id'],
['competition_seasons.competition_id', 'competition_seasons.season_id'],
),
)
id = Column(Integer, Sequence('partial_id_seq', start=10000), primary_key=True)
start_week = Column(Integer, CheckConstraint('start_week > 0'))
end_week = Column(Integer, CheckConstraint('end_week > 0'))
player_id = Column(Integer, ForeignKey('players.id'))
club_id = Column(Integer, ForeignKey('clubs.id'))
competition_id = Column(Integer)
season_id = Column(Integer)
player = relationship('Players', backref=backref('partials'))
club = relationship('Clubs', backref=backref('partials'))
comp_season = relationship('CompetitionSeasons', backref=backref('partials'))
def __repr__(self):
return u"<PartialTenure(name={0}, club={1}, competition={2}, season={3}, " \
u"start_week={4}, end_week={5})>".format(self.player.full_name, self.club.name,
self.comp_season.competition.name,
self.comp_season.season.name,
self.start_week, self.end_week).encode('utf-8')
def __unicode__(self):
return u"<PartialTenure(name={0}, club={1}, competition={2}, season={3}, " \
u"start_week={4}, end_week={5})>".format(self.player.full_name, self.club.name,
self.comp_season.competition.name,
self.comp_season.season.name,
self.start_week, self.end_week)
| mit | -645,660,621,350,676,700 | 42.19084 | 114 | 0.59597 | false |
koepferl/FluxCompensator | fluxcompensator/cube.py | 1 | 25142 | from copy import deepcopy
import os
ROOT = os.path.dirname(os.path.abspath(__file__)) + '/'
from astropy import log as logger
from astropy.io import fits
import numpy as np
from numpy.random import normal
from .psf import GaussianPSF, FilePSF, FunctionPSF
from .filter import Filter
from .utils.plot import MakePlots
from .utils.resolution import ConservingZoom, central
from .utils.tools import properties, grid_units, get_slices, average_collapse, central_wav
from .utils.units import ConvertUnits
# submitting PhD thesis today :)
class SyntheticCube(object):
'''
    SyntheticCube is part of the FluxCompensator. It converts
input_arrays (e. g. HYPERION ModelOutput) to "realistic"
synthetic observations (e.g. accounts for PSF, filters & noise).
It contains attributes like ModelOutput (see Notes).
If input_array is already a SyntheticCube object, the attributes are
passed. If input_array is not a SyntheticCube object, SyntheticCube
specific attributes are defined and then passed.
Parameters
----------
input_array : SyntheticCube, ModelOutput, optional
input_array also reads arrays with ModelOutput like properties.
unit_out : str, optional
The output units for SyntheticCube val. Valid options are:
* ``'ergs/cm^2/s'``
* ``'ergs/cm^2/s/Hz'``
* ``'Jy'``
* ``'mJy'``
* ``'MJy/sr'``
The default is ``'ergs/cm^2/s'``.
name : str
The name of the FluxCompensator object until another
input_array is called. The default is ``None``.
Attributes
----------
wav : numpy.ndarray
The wavelengths of val cube slices in microns.
val : numpy.ndarray
The 3D cube with shape (x, y, wav).
units : str
Current units of the val cube.
distance : str
Distance to the observed object in cm.
x_min : float
Physical offset from axis origin in FOV in cm.
x_max : float
Physical offset from axis origin in FOV in cm.
y_min : float
Physical offset from axis origin in FOV in cm.
y_max : float
Physical offset from axis origin in FOV in cm.
lon_min : float
Minimal longitudinal angle.
lon_max : float
Maximal longitudinal angle.
lat_min : float
Minimal latitudinal angle.
lat_max : float
Maximal latitudinal angle.
pix_area_sr : float
Pixel area per sr.
Notes
-----
unit_in : str
Unit of val in input_array. Valid options are:
* ``'ergs/cm^2/s'``
* ``'ergs/cm^2/s/Hz'``
* ``'Jy'``
* ``'mJy'``
* ``'MJy/sr'``
grid_unit : float
Physical unit of FOV axis in cm. Valid options are:
* ``au`` in cm
* ``pc`` in cm
* ``kpc`` in cm
grid_unit_name
Astronomical unit of FOV axis. Valid options are:
* ``'au'``
* ``'pc'``
* ``'kpc'``
FOV : tuple
Tuple ``FOV(x,y)`` of Field of View pixel entries.
* pixel in x direction: ``FOV[0]``
* pixel in y direction: ``FOV[1]``
name : str
The name of the FluxCompensator object until another
input_array is called. The default is ``None``.
stage : str
Gives current operation stage of SyntheticCube.
E. g. ``'SyntheticCube: convolve_filter'``
log : list
List of strings of the previous and current stages.
filter : dict
Dictionary ``filter = {name, waf_0, waf_min, waf_max}``
of the applied filter.
* name of filter: ``filter['name']``
* central wavelength: ``filter['waf_0']``
* minimal wavelength: ``filter['waf_min']``
* maximal wavelength: ``filter['waf_max']``
Returns
-------
cube : SyntheticCube
3D val array with SyntheticCube properties.
image : SyntheticImage
2D val array with SyntheticImage properties.
sed : SyntheticSED
1D val array (collapsed rough SED) with SyntheticSED properties.
flux : SyntheticFlux
0D val array (scalar) with SyntheticFlux properties.
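    Examples
    --------
    A minimal usage sketch (``model_image`` stands in for a HYPERION
    ModelOutput-like image object; the numeric values are illustrative)::
        cube = SyntheticCube(model_image, unit_out='ergs/cm^2/s', name='demo')
        cube = cube.extinction(A_v=20.)            # redden the cube
        cube = cube.change_resolution(2.0)         # arcsec per pixel
        image = cube.convolve_filter(some_filter)  # collapse to a 2D image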
'''
def __init__(self, input_array, unit_out='ergs/cm^2/s', name=None):
# Hyperion ModelOutput attributes
#if input_array.val.ndim == 3:
self.val = np.array(deepcopy(input_array.val))
#else:
# raise Exception('input_array does not have the right dimensions. numpy array of (x, y, wav) is required.')
self.wav = np.array(deepcopy(input_array.wav))
self.units = input_array.units
self.distance = input_array.distance
self.x_max = input_array.x_max
self.x_min = input_array.x_min
self.y_max = input_array.y_max
self.y_min = input_array.y_min
self.lon_min = input_array.lon_min
self.lon_max = input_array.lon_max
self.lat_min = input_array.lat_min
self.lat_max = input_array.lat_max
self.pix_area_sr = input_array.pix_area_sr
##################
# new attributes #
##################
if isinstance(input_array, SyntheticCube):
# attributes with are passed, since input_array is SyntheticCube
# physical values
self.unit_in = input_array.unit_in
self.unit_out = input_array.unit_out
self.grid_unit = input_array.grid_unit
self.grid_unit_name = input_array.grid_unit_name
# properties of cube
self.FOV = deepcopy(input_array.FOV)
# name
self.name = input_array.name
self.stage = input_array.stage
self.log = deepcopy(input_array.log)
# filter
self.filter = deepcopy(input_array.filter)
else: # attributes are defined, since input_array is NOT SyntheticCube
# physical values
self.unit_in = input_array.units
self.unit_out = unit_out
self.grid_unit = grid_units(self.x_max - self.x_min)['grid_unit']
self.grid_unit_name = grid_units(self.x_max - self.x_min)['grid_unit_name']
self.FOV = (self.x_max - self.x_min, self.y_max - self.y_min)
# name
self.name = name
self.stage = 'SyntheticCube: initial'
self.log = [self.stage]
# filter
self.filter = {'name': None, 'waf_0': None, 'waf_min': None, 'waf_max': None}
# convert into val units into unit_out
s = ConvertUnits(wav=self.wav, val=self.val)
self.val = s.get_unit(in_units=self.unit_in, out_units=self.unit_out, input_resolution=self.resolution['arcsec'])
self.units = self.unit_out
def extinction(self, A_v, input_opacities=None):
'''
Accounts for reddening.
Parameters
----------
        A_v : float
            Value of the visible extinction.
input_opacities : ``None``, str
If ``None`` standard extinction law is used.
Otherwise a e. g. input_opacities.txt file can be passed
as a str to read an opacity file with column #1 wav in microns
and column #2 in cm^2/g.
Default is ``None``.
Returns
-------
cube : SyntheticCube
'''
stage = 'SyntheticCube: extinction'
# read own extinction law
if input_opacities is None:
t = np.loadtxt(ROOT + 'database/extinction/extinction_law.txt')
else:
t = np.loadtxt(input_opacities)
wav_ext = t[:, 0]
k_lam = t[:, 1]
# wav_ext monotonically increasing
if wav_ext[0] > wav_ext[1]:
wav_ext = wav_ext[::-1]
k_lam = k_lam[::-1]
k_v = np.interp(0.550, wav_ext, k_lam)
# interpolate to get A_int for a certain wavelength
k = np.interp(self.wav, wav_ext, k_lam)
A_int_lam = A_v * (k / k_v)
# apply extinction law
val_ext = np.zeros(shape=np.shape(self.val))
val_ext[:,:,:len(self.wav)] = self.val[:,:,:len(self.wav)] * 10**(-0.4 * A_int_lam[:len(self.wav)])
# return SimulateCube
c = SyntheticCube(self)
c.val = val_ext
c.stage = stage
c.log.append(c.stage)
return c
def change_resolution(self, new_resolution, grid_plot=None):
'''
Changes the resolution of every slice of the val cube.
Parameters
----------
        new_resolution : float
            Resolution which the val array should get in ``arcsec/pixel``.
grid_plot : ``None``, ``True``
If ``True`` old and new resolution is visualized in a plot.
Default is ``None``.
Returns
-------
cube : SyntheticCube
'''
stage = 'SyntheticCube: change_resolution'
# debugging comment
logger.debug('-' * 70)
logger.debug(stage)
logger.debug('-' * 70)
logger.debug('total value before zoom : ' + str('%1.4e' % np.sum(self.val)) + ' ' + str(self.units))
# match resolution of psf and val slice
f = ConservingZoom(array=self.val, initial_resolution=self.resolution['arcsec'], new_resolution=new_resolution)
zoomed_val = f.zoom()
# average after changing resolution for MJy/sr
if self.units == 'MJy/sr' or self.units == 'Jy/arcsec^2':
# size of new pixel in units of old pixel
size = new_resolution ** 2 / self.resolution['arcsec'] ** 2
zoomed_val = zoomed_val / size
if grid_plot is not None:
f.zoom_grid(self.name)
# debugging comment
logger.debug('total value after zoom : ' + str('%1.4e' % np.sum(zoomed_val)) + ' ' + str(self.units))
# return SimulateCube
c = SyntheticCube(self)
c.val = zoomed_val
c.stage = stage
c.log.append(c.stage)
c.FOV = (f.len_nx / f.len_nrx * self.FOV[0], f.len_ny / f.len_nry * self.FOV[1])
return c
def central_pixel(self, dx, dy):
'''
Move array right and up to create a central pixel.
Returns
-------
cube : SyntheticCube
'''
stage = 'SyntheticCube: central_pixel'
ce = central(array=self.val, dx=dx, dy=dy)
len_x_old = float(self.pixel[0])
len_x_new = float(len(ce[:,0]))
len_y_old = float(self.pixel[1])
len_y_new = float(len(ce[0,:]))
old_FOV = self.FOV
new_FOV = (len_x_new / len_x_old * old_FOV[0], len_y_new / len_y_old * old_FOV[1])
# return SimulateCube
c = SyntheticCube(self)
c.val = ce
c.stage = stage
c.log.append(c.stage)
c.FOV = new_FOV
return c
def convolve_psf(self, psf):
'''
Convolves every slice of the val cube with a PSF of choice.
Parameters
----------
psf : GaussianPSF, FilePSF, database, FunctionPSF
* GaussianPSF(self, diameter): Convolves val with Gaussian PSF.
* FilePSF(self, psf_file, condensed): Reads PSF from input file.
* database: PSF object defined in FluxCompensator database.
* FunctionPSF(self, psf_function, width): Convolves val with calculated PSF.
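        Example (the diameter value is illustrative only)::
            cube = cube.convolve_psf(GaussianPSF(diameter=350.))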
Returns
-------
cube : SyntheticCube
'''
stage = 'SyntheticCube: convolve_PSF'
# debugging comments
if isinstance(psf, GaussianPSF):
logger.debug('-' * 70)
logger.debug(stage + 'with GaussianPSF')
logger.debug('-' * 70)
# convolve value with classes GaussianPSF, FilePSF and FunctionPSF
val = self.val.copy()
for i in range(len(self.wav)):
val[:, :, i] = psf.convolve(wav = self.wav[i], array = self.val[:,:, i], resolution = self.resolution)
# return SimulateCube
c = SyntheticCube(self)
c.val = val
c.stage = stage
c.log.append(c.stage)
return c
def convolve_filter(self, filter_input, plot_rebin=None, plot_rebin_dpi=None):
'''
Convolves slice within filter limits into a 2D image.
Parameters
----------
filter_input : object
* database : if filter ``name`` from FluxCompensator database is used.
* Filter : if own filter is used.
plot_rebin : ``True``, ``None``
Switch to plot the rebined filter and the original filter in one plot.
plot_rebin_dpi : ``None``, scalar > 0
The resolution in dots per inch.
``None`` is default and will use the value savefig.dpi
in the matplotlibrc file.
Returns
-------
image : SyntheticImage
'''
stage = 'SyntheticCube: convolve_filter'
# debugging comment
logger.debug('-' * 70)
logger.debug(stage)
logger.debug('-' * 70)
weight = filter_input.rebin(self.wav, self.val)
# returns weight{'wav_short' 'val_short' 'Response_new' 'filter_index' 'wavf_0' 'waf_min' 'waf_max' 'filter_name'}
wav_short = weight['wav_short']
val_short = weight['val_short']
filter_index = weight['filter_index']
Response_new = weight['Response_new']
waf_0 = weight['waf_0']
waf_min = weight['waf_min']
waf_max = weight['waf_max']
filter_name = weight['filter_name']
if plot_rebin is not None:
plot = filter_input.plot(val_name=self.name, dpi=plot_rebin_dpi)
# weight val_short with rebined response
val = val_short.copy()
val[:, :, :len(wav_short)] = val_short[:,:, :len(wav_short)] * Response_new[:len(wav_short)]
# collapse remaining cube into 2D
val_2D = np.sum(val, axis=2)
# return SyntheticImage
from .image import SyntheticImage
i = SyntheticImage(self)
i.log.append(stage)
i.stage = 'SyntheticImage: initial'
i.log.append(i.stage)
i.filter = {'name': filter_name, 'waf_0': waf_0, 'waf_min': waf_min, 'waf_max': waf_max}
i.val = val_2D
i.wav = np.array(waf_0)
return i
def add_noise(self, mu_noise, sigma_noise, seed=None, diagnostics=None):
'''
        Adds normally distributed noise to every slice in the val cube
of SyntheticCube.
Parameters
----------
mu_noise : float
Mean of the normal distribution.
Good choice: mu_noise = 0.
sigma_noise : float
Standard deviation of the normal distribution. Good choice around:
* ``'ergs/cm^2/s'`` : sigma_noise = 10.**(-13)
* ``'ergs/cm^2/s/Hz'`` : sigma_noise = 10.**(-26)
* ``'Jy'`` : sigma_noise = 10.**(-3)
* ``'mJy'`` : sigma_noise = 10.**(-1)
* ``'MJy/sr'`` : sigma_noise = 10.**(-10)
seed : float, ``None``
When float seed fixes the random numbers to a certain
sequence in order to create reproducible results.
Default is ``None``.
        diagnostics : ``True``, ``None``
            When ``True`` the noise array is stored in a fits file.
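        Example (sigma chosen from the suggestions above)::
            noisy = cube.add_noise(mu_noise=0., sigma_noise=10.**(-13), seed=2)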
Returns
-------
cube : SyntheticCube
'''
stage = 'SyntheticCube: add_noise'
# add different noise with same mu and sigma to 3D cube
val = self.val.copy()
for i in range(len(self.wav)):
if sigma_noise != 0. and sigma_noise != 0:
if seed is not None:
np.random.seed(seed=seed)
noise = normal(mu_noise, sigma_noise, self.pixel)
if sigma_noise == 0. or sigma_noise == 0:
noise = np.zeros(self.pixel)
val[:, :, i] = self.val[:,:, i] + noise
if diagnostics is True:
fits.writeto(self.name + '_process-output_SC-noise.fits', noise, clobber=True)
# return SyntheticCube
c = SyntheticCube(self)
c.val = val
c.stage = stage
c.log.append(c.stage)
return c
def get_rough_sed(self):
'''
Collapses the current val cube into 1D array (SED).
Returns
-------
sed : SyntheticSED
'''
stage = 'SyntheticCube: get_rough_sed'
# for MJy/sr convert first, add and then convert back
if self.unit_out == 'MJy/sr' or self.unit_out == 'Jy/arcsec^2':
s = ConvertUnits(wav=self.wav, val=self.val)
self.val = s.get_unit(in_units=self.units, out_units='Jy', input_resolution=self.resolution['arcsec'])
# collapse every slice to one scalar value
rough_sed = np.sum(np.sum(self.val.copy(), axis=1), axis=0)
if self.unit_out == 'MJy/sr' or self.unit_out == 'Jy/arcsec^2':
s = ConvertUnits(wav=self.wav, val=rough_sed)
rough_sed = s.get_unit(in_units='Jy', out_units=self.unit_out, input_resolution=self.resolution['arcsec'] * self.pixel[0])
# return SyntheticSED
from .sed import SyntheticSED
s = SyntheticSED(self)
s.log.append(stage)
s.stage = 'SyntheticSED: initial'
s.log.append(s.stage)
s.val = rough_sed
return s
def get_total_val(self, wav_1, wav_2):
'''
Collapses the val of SyntheticCube within the boundaries wav_1
and wav_2 into a 0D value val.
WARNING: This tool cannot replace convolve_filter!
But it can be used to produce rough estimates
in-between the processes.
Parameters
----------
wav_1, wav_2 : float
Boundaries in microns.
Returns
-------
val : SyntheticFlux
'''
stage = 'SyntheticCube: get_total_val'
        # slices within the boundaries are extracted, average-collapsed to a 2D image and finally collapsed to a single scalar value
# for MJy/sr convert first, add and then convert back
if self.unit_out == 'MJy/sr' or self.unit_out == 'Jy/arcsec^2':
s = ConvertUnits(wav=self.wav, val=self.val)
val = s.get_unit(in_units=self.units, out_units='Jy', input_resolution=self.resolution['arcsec'])
else: val = self.val
c = get_slices(wav=self.wav, val=val, wav_1=wav_1, wav_2=wav_2)
i = average_collapse(val=c['val_short'])
f_total = np.sum(i)
# real limits within collaps
wav_max = 10 ** (np.log10(self.wav[c['filter_index'][0]]) + self.spacing_wav / 2.)
wav_min = 10 ** (np.log10(self.wav[c['filter_index'][-1]]) - self.spacing_wav / 2.)
wav_total = central_wav(wav=[wav_min, wav_max])
if self.unit_out == 'MJy/sr' or self.unit_out == 'Jy/arcsec^2':
s = ConvertUnits(wav=wav_total, val=f_total)
f_total = s.get_unit(in_units='Jy', out_units=self.unit_out, input_resolution=self.resolution['arcsec'] * self.pixel[0])
# return SyntheticFlux
from .flux import SyntheticFlux
f = SyntheticFlux(self)
f.log.append(stage)
f.stage = 'SyntheticFlux: initial'
f.log.append(f.stage)
f.wav = np.array(wav_total)
f.val = np.array(f_total)
f.filter = {'name': 'val_tot', 'waf_0': wav_total, 'waf_min': wav_min, 'waf_max': wav_max}
return f
def plot_image(self, wav_interest, prefix=None, name=None, multi_cut=None, single_cut=None, set_cut=None, dpi=None):
'''
        Plots the slice of the cube closest to wav_interest.
The wavelength interval of the chosen slice labels the plot.
Parameters
----------
wav_interest : float, ``None``
* float : wavelength close to slice in microns.
* ``None`` : Only if input_array is SyntheticImage like
prefix : str
Name of the image. Default naming chain is switched off.
name : str
Name of image within the default naming chain to distinguish the
plot files. E. g. 'PSF_gaussian'
        multi_cut : ``True``, ``None``
* ``True`` : plots chosen image slice at cuts of [100, 99, 95, 90]%.
* ``None`` : no mulit-plot is returned.
Default is ``None``.
single_cut : float, ``None``
* float : cut level for single plot of image slice between 0 and 100.
* ``None`` : no single plot is returned.
set_cut : tuple, ``None``
* tuple : set_cut(v_min, v_max)
Minimal and maximal physical value of val in the colorbars.
* ``None`` : no plot with minimal and maximal cut is returned.
Default is ``None``.
dpi : ``None``, scalar > 0
The resolution in dots per inch.
            ``None`` is default and will use the value savefig.dpi
in the matplotlibrc file.
Returns
-------
cube : SyntheticCube
'''
stage = 'SyntheticCube: plot_image'
if prefix is None and name is None:
            raise Exception('If prefix is not given, you need to give a name to enable the default naming chain.')
if prefix is not None:
if multi_cut is True and (single_cut is not None or set_cut is not None):
raise Exception('If prefix naming is enabled only one plotting option can be chosen.')
elif multi_cut is None and (single_cut is not None and set_cut is not None):
raise Exception('If prefix naming is enabled only one plotting option can be chosen.')
plot = MakePlots(prefix=prefix, name=name, input_array=SyntheticCube(self), wav_interest=wav_interest, multi_cut=multi_cut, single_cut=single_cut, set_cut=set_cut, dpi=dpi)
# return SyntheticCube
c = SyntheticCube(self)
c.stage = stage
c.log.append(c.stage)
return c
@property
def spacing_wav(self):
'''
The property spacing_wav estimates the width of the logarithmic
spaced wav entries.
'''
if self.wav.ndim != 0:
spacing_wav = np.log10(self.wav[0] / self.wav[-1]) / (len(self.wav) - 1)
else:
spacing_wav = None
return spacing_wav
@property
def pixel(self):
'''
The property pixel is a tuple which resembles the current pixel in a
value val. ``pixel(x,y)`` are calls as follows:
``x = pixel[0]``
``y = pixel[1]``
'''
if self.val.ndim in (0, 1):
pixel = (None, None)
if self.val.ndim in (2, 3):
pixel = (self.val.shape[0], self.val.shape[1])
return pixel
@property
def shape(self):
'''
The property shape is a string, which resembles the current shape of
the value val.
scalar: ``'()'``
1D: ``'(wav)'``
2D: ``'(x, y)'``
3D: ``'(x, y , wav)'``
'''
if self.val.ndim == 0:
shape = '()'
if self.val.ndim == 1:
shape = '(wav)'
if self.val.ndim == 2:
shape = '(x, y)'
if self.val.ndim == 3:
shape = '(x, y, wav)'
return shape
@property
def resolution(self):
'''
The property resolution tells you the current resolution. If we are already
in the SED or flux everything is considered as one large pixel.
resolution in arcsec per pixel : ``resolution['arcsec']``
resolution in rad per pixel : ``resolution['rad']``
'''
resolution = {}
if self.pixel[0] is None:
resolution['rad'] = self.FOV[0] / 1. / self.distance
else:
resolution['rad'] = self.FOV[0] / self.pixel[0] / self.distance
resolution['arcsec'] = np.degrees(resolution['rad']) * 3600
return resolution
| bsd-2-clause | -1,980,972,553,126,057,000 | 31.483204 | 180 | 0.532655 | false |
manimaul/MX-Cart | buildLin.py | 1 | 4497 | import os, hashlib
from shutil import copy2 as copy
from Resources import versionNum
from subprocess import Popen
from shutil import rmtree
def md5sum(fd, block_size=2**20):
md5 = hashlib.md5()
while True:
data = fd.read(block_size)
if not data:
break
md5.update(data)
return md5.hexdigest()
#clean any previous
mPath = os.path.dirname(__file__)+"/build/debpkg/"
if os.path.isdir(mPath):
rmtree(mPath)
#create DEBIAN directory
mPath = os.path.dirname(__file__)+"/build/debpkg/DEBIAN"
if not os.path.isdir(mPath):
os.makedirs(mPath)
#write control file
control = open(mPath+"/control", "w")
control.write( "Package: MXCart\n" + \
"Version: %s\n" %(versionNum) + \
"Section: misc\n" + \
"Priority: optional\n" + \
"Architecture: all\n" + \
"Depends: pngnq, python, python-wxgtk2.8, python-imaging, python-gdal, python-pyproj, python-simplejson, python-shapely\n" + \
"Installed-Size: 331\n" + \
"Maintainer: Will Kamp\n" + \
"Description: BSB version 2 and 3 chart import utility for MX Mariner\n" )
control.close()
#copy over needed python files
mPath = os.path.dirname(__file__)+"/build/debpkg/usr/local/lib/mxcart/"
if not os.path.isdir(mPath):
os.makedirs(mPath)
for pyFile in ["/BsbHeader.py", "/buildWin.py", "/GUI.py", "/MXCart.py", "/BsbScales.py", \
"/BsbOutlines.py", "/FilePathSearch.py", "/Helper_Gdal.py", "/MyGemfBuilder.py", \
"/Helper_Tiler.py", "/Helper_Merge.py", "/Resources.py", "/FindZoom.py", "/GenerateData.py", \
"/reader_bsb_data.csv", "/my_tilers_tools/viewer-google.html", "/my_tilers_tools/viewer-openlayers.html"]:
#print os.path.dirname(__file__)+pyFile, mPath
copy(os.path.dirname(__file__)+pyFile, mPath)
mPath = os.path.dirname(__file__)+"/build/debpkg/usr/local/lib/mxcart/my_tilers_tools/"
if not os.path.isdir(mPath):
os.makedirs(mPath)
for pyFile in ["/my_tilers_tools/gdal_tiler.py", \
"/my_tilers_tools/generate_efficient_map_file.py", \
"/my_tilers_tools/map2gdal.py", \
"/my_tilers_tools/reader_backend.py", \
"/my_tilers_tools/reader_bsb.py", \
"/my_tilers_tools/tiler_functions.py", \
"/my_tilers_tools/tiles_convert.py", \
"/my_tilers_tools/tiles_merge_simple.py" ]:
#print os.path.dirname(__file__)+pyFile, mPath
copy(os.path.dirname(__file__)+pyFile, mPath)
#copy dependant images
mPath = os.path.dirname(__file__)+"/build/debpkg/usr/local/share/mxcart/"
if not os.path.isdir(mPath):
os.makedirs(mPath)
for pyFile in ["/kattegat.png", "/spinner.gif"]:
#print os.path.dirname(__file__)+pyFile, mPath
copy(os.path.dirname(__file__)+pyFile, mPath)
mPath = os.path.dirname(__file__)+"/build/debpkg/usr/local/share/icons/hicolor/48x48/apps/"
if not os.path.isdir(mPath):
os.makedirs(mPath)
copy(os.path.dirname(__file__)+"/mxcart.png", mPath)
#create bin
mPath = os.path.dirname(__file__)+"/build/debpkg/usr/local/bin"
if not os.path.isdir(mPath):
os.makedirs(mPath)
binsh = open(mPath + "/mxcart", "w")
binsh.write("#!/bin/bash\n\n" + \
"cd /usr/local/lib/mxcart\n" + \
"python MXCart.py\n")
binsh.close()
Popen(["chmod", "777", mPath + "/mxcart"])
#create desktop entry
mPath = os.path.dirname(__file__)+"/build/debpkg/usr/local/share/applications"
if not os.path.isdir(mPath):
os.makedirs(mPath)
desktop = open(mPath + "/mxcart.desktop", "w")
desktop.write("[Desktop Entry]\n" + \
"Version=%s\n" %(versionNum) + \
"Name=MX Cart\n" + \
"Comment=BSB Chart Import Utility\n" + \
"Path=/usr/local/lib/mxcart/\n" + \
"Exec=mxcart\n" + \
"Icon=/usr/local/share/icons/hicolor/48x48/apps/mxcart.png\n" + \
"StartupNotify=true\n" + \
"Terminal=false\n" + \
"Type=Application\n" + \
"Categories=Education;Science;Geography;" )
desktop.close()
Popen(["dpkg-deb", "-b", os.path.dirname(__file__)+"/build/debpkg", os.path.dirname(__file__)+"/build/MXCart_%s_.deb" %(versionNum)])
##write md5sum file
#mPath = os.path.dirname(__file__)+"/build/debpkg/DEBIAN"
#md5sums = open(mPath+"/md5sums", "w")
#for ea in os.listdir(os.path.dirname(__file__)+"/build/debpkg/usr/local/lib/mxcart/"):
# fd = open( os.path.dirname(__file__)+"/build/debpkg/usr/local/lib/mxcart/"+ea, "rb" )
# md5sums.write(md5sum(fd) + " " + "/usr/local/lib/mxcart/"+ea+"\n")
# fd.close()
##for fd in os
#md5sums.close()
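#Sanity-check the built package (illustrative commands):
#  dpkg-deb -I build/MXCart_<version>_.deb   shows the control information
#  dpkg-deb -c build/MXCart_<version>_.deb   lists the packaged files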
| bsd-2-clause | 551,775,681,851,644,600 | 38.104348 | 133 | 0.632866 | false |
jumoconnect/openjumo | jumodjango/cust_admin/templatetags/ext_admin_list.py | 1 | 5314 | import datetime
from django.conf import settings
from django.contrib.admin.util import lookup_field, display_for_field, label_for_field
from django.contrib.admin.views.main import ALL_VAR, EMPTY_CHANGELIST_VALUE
from django.contrib.admin.views.main import ORDER_VAR, ORDER_TYPE_VAR, PAGE_VAR, SEARCH_VAR
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.forms.forms import pretty_name
from django.utils import formats
from django.template.defaultfilters import escapejs
from django.utils.html import escape, conditional_escape
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils.translation import ugettext as _
from django.utils.encoding import smart_unicode, force_unicode
from django.template import Library
from django.contrib.admin.templatetags.admin_list import result_headers, result_hidden_fields
register = Library()
"""
All this was copy and pasted so the custDismissRelatedLookupPopup could be inserted here.
Do a find.
"""
def ext_items_for_result(cl, result, form):
"""
Generates the actual list of data.
"""
first = True
pk = cl.lookup_opts.pk.attname
for field_name in cl.list_display:
row_class = ''
try:
f, attr, value = lookup_field(field_name, result, cl.model_admin)
except (AttributeError, ObjectDoesNotExist):
result_repr = EMPTY_CHANGELIST_VALUE
else:
if f is None:
allow_tags = getattr(attr, 'allow_tags', False)
boolean = getattr(attr, 'boolean', False)
if boolean:
allow_tags = True
result_repr = _boolean_icon(value)
else:
result_repr = smart_unicode(value)
# Strip HTML tags in the resulting text, except if the
# function has an "allow_tags" attribute set to True.
if not allow_tags:
result_repr = escape(result_repr)
else:
result_repr = mark_safe(result_repr)
else:
if value is None:
result_repr = EMPTY_CHANGELIST_VALUE
if isinstance(f.rel, models.ManyToOneRel):
result_repr = escape(getattr(result, f.name))
else:
result_repr = display_for_field(value, f)
if isinstance(f, models.DateField) or isinstance(f, models.TimeField):
row_class = ' class="nowrap"'
if force_unicode(result_repr) == '':
result_repr = mark_safe(' ')
# If list_display_links not defined, add the link tag to the first field
if (first and not cl.list_display_links) or field_name in cl.list_display_links:
table_tag = {True:'th', False:'td'}[first]
first = False
url = cl.url_for_result(result)
# Convert the pk to something that can be used in Javascript.
# Problem cases are long ints (23L) and non-ASCII strings.
if cl.to_field:
attr = str(cl.to_field)
else:
attr = pk
value = result.serializable_value(attr)
result_id = repr(force_unicode(value))[1:]
#All this was copy and pasted so the custDismissRelatedLookupPopup could be inserted here.
ext_attrib = ""
if cl.is_popup:
if cl.is_ext_popup:
ext_attrib = 'onclick="opener.custDismissRelatedLookupPopup(window, %s, \'%s\'); return false;"' % (result_id, escapejs(result_repr))
else:
ext_attrib = ' onclick="opener.dismissRelatedLookupPopup(window, %s); return false;"' % result_id
yield mark_safe(u'<%s%s><a href="%s" %s>%s</a></%s>' % \
(table_tag, row_class, url, ext_attrib, result_repr, table_tag))
else:
# By default the fields come from ModelAdmin.list_editable, but if we pull
# the fields out of the form instead of list_editable custom admins
# can provide fields on a per request basis
if form and field_name in form.fields:
bf = form[field_name]
result_repr = mark_safe(force_unicode(bf.errors) + force_unicode(bf))
else:
result_repr = conditional_escape(result_repr)
yield mark_safe(u'<td%s>%s</td>' % (row_class, result_repr))
if form and not form[cl.model._meta.pk.name].is_hidden:
yield mark_safe(u'<td>%s</td>' % force_unicode(form[cl.model._meta.pk.name]))
def ext_results(cl):
if cl.formset:
for res, form in zip(cl.result_list, cl.formset.forms):
yield list(ext_items_for_result(cl, res, form))
else:
for res in cl.result_list:
yield list(ext_items_for_result(cl, res, None))
def ext_result_list(cl):
"""
Displays the headers and data list together
"""
return {'cl': cl,
'result_hidden_fields': list(result_hidden_fields(cl)),
'result_headers': list(result_headers(cl)),
'results': list(ext_results(cl))}
ext_result_list = register.inclusion_tag("admin/change_list_results.html")(ext_result_list)
| mit | -7,825,369,702,562,516,000 | 42.917355 | 153 | 0.605947 | false |
vhernandez/jwsProcessor | src/jwsprocessor/fc_calculator.py | 1 | 7488 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
To convert from mdeg to molar ellipticity (deg*cm2*dmol-1):
m.e. = mdeg / (10*l*(C/MW)*Rn)
where
l = light path in cm
C = concentration in mg/ml
MW = molecular weight
Rn = number of residues of the protein
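Worked example (using this module's C-LytA defaults; numbers are
illustrative): with l = 0.1 cm, C = 0.1 mg/ml, MW = 15840 and Rn = 136,
the correction factor is 1/(10*0.1*(0.1/15840)*136) = 15840/13.6 ~ 1164.7,
so m.e. = mdeg * 1164.7.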
"""
import pygtk
pygtk.require("2.0")
import gtk
import gobject
from tools import _
CU_WEIGHT_VOL = 0
CU_MICROMOLAR = 1
CU_MILIMOLAR = 2
CONC_UNITS_LIST = [CU_WEIGHT_VOL, CU_MICROMOLAR, CU_MILIMOLAR]
class ProteinInfo:
def __init__(self, name, molecular_weight, residue_number,
def_lp = 0.1, def_c=0.1, def_c_units = CU_WEIGHT_VOL):
self.name = name
self.molecular_weight = molecular_weight
self.residue_number = residue_number
self.default_light_path = def_lp
self.default_concentration = def_c
if def_c_units in CONC_UNITS_LIST:
self.default_conc_units = def_c_units
else:
self.default_conc_units = CU_WEIGHT_VOL
def get_c_units(self):
if not self.default_conc_units in CONC_UNITS_LIST:
self.default_conc_units = CU_WEIGHT_VOL
return self.default_conc_units
class CDCorrectionFactorCalculator(gtk.Dialog):
c_text = _("Concentration (%s):")
def __init__(self, initial_params=ProteinInfo("C-LytA", 15840, 136),
parent=None):
gtk.Dialog.__init__(self, title=_("Calculate correction factor"),
parent=parent, flags=gtk.DIALOG_MODAL,
buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT,
gtk.STOCK_OK, gtk.RESPONSE_ACCEPT))
self._create_widgets()
self._configure_widgets(initial_params)
self.protein_info = initial_params
self.correction_factor = 0
def _calculate_fc(self):
cu = self.c_units_combo.get_active()
if cu ==CU_MICROMOLAR: # uM
C = self.C_spinner.get_value() / 1000000.0
elif cu ==CU_MILIMOLAR: #mM
C = self.C_spinner.get_value() / 1000.0
else: #mg/ml
MW = self.MW_spinner.get_value()
if MW != 0.0:
C = self.C_spinner.get_value() / MW
else:
C = 0.0
LP = self.LP_spinner.get_value()
Rn = self.Rn_spinner.get_value()
FC_0 = 10*LP*C*Rn
if FC_0 != 0:
self.correction_factor = 1.0/FC_0
else:
self.correction_factor = 0.0
return self.correction_factor
def _c_units_changed_cb(self, widget):
cu = self.c_units_combo.get_active()
if cu ==CU_MICROMOLAR:
text = self.c_text % "uM"
self.C_spinner.set_increments(0.1, 1.0)
elif cu ==CU_MILIMOLAR:
text = self.c_text % "mM"
self.C_spinner.set_increments(0.01, 0.1)
else:
text = self.c_text % "mg/ml"
self.C_spinner.set_increments(0.01, 0.1)
self.C_label.set_text(text)
self._update_factor_cb(widget)
def _copy_to_clipboard_cb(self, widget):
clipboard = gtk.Clipboard()
clipboard.set_text("%f" % self._calculate_fc())
def _update_factor_cb(self, widget):
self.factor_entry.set_text("%f" % self._calculate_fc())
def _configure_widgets(self, protein_info):
self.LP_spinner.set_value(protein_info.default_light_path)
self.C_spinner.set_value(protein_info.default_concentration)
self.c_units_combo.set_active(protein_info.get_c_units())
self._c_units_changed_cb(self.c_units_combo)
self.MW_spinner.set_value(protein_info.molecular_weight)
self.Rn_spinner.set_value(protein_info.residue_number)
self._update_factor_cb(self)
self.c_units_combo.connect("changed", self._c_units_changed_cb)
self.LP_spinner.connect("value-changed", self._update_factor_cb )
self.C_spinner.connect("value-changed", self._update_factor_cb )
self.MW_spinner.connect("value-changed", self._update_factor_cb )
self.Rn_spinner.connect("value-changed", self._update_factor_cb )
def _create_widgets(self):
def create_label(label):
l = gtk.Label(label)
l.set_alignment(0,0.5)
l.set_use_markup(True)
return l
self.LP_spinner = gtk.SpinButton()
self.LP_spinner.set_range(0.0,10.0)
self.LP_spinner.set_digits(2)
self.LP_spinner.set_increments(0.01, 0.1)
self.C_label = create_label(_("Concentration (mg/ml):"))
self.C_spinner = gtk.SpinButton()
self.C_spinner.set_range(0.0,50.0)
self.C_spinner.set_digits(4)
self.C_spinner.set_increments(0.01, 0.1)
self.MW_spinner = gtk.SpinButton()
self.MW_spinner.set_range(1.0,1000000000000.0)
self.MW_spinner.set_digits(2)
self.MW_spinner.set_increments(10.0, 100.0)
self.Rn_spinner = gtk.SpinButton()
self.Rn_spinner.set_range(1.0,1000000000000.0)
self.Rn_spinner.set_digits(0)
self.Rn_spinner.set_increments(1.0, 10.0)
self.factor_entry = gtk.Entry()
self.factor_entry.props.editable = False
self.factor_entry.set_text("%f" % 0.0)
self.c_units_list = gtk.ListStore(str)
self.c_units_list.append(["m:v (mg/ml)"])
self.c_units_list.append(["micromolar"])
self.c_units_list.append(["milimolar"])
cell = gtk.CellRendererText()
self.c_units_combo = gtk.ComboBox(self.c_units_list)
self.c_units_combo.pack_start(cell, True)
self.c_units_combo.add_attribute(cell, 'text', 0)
self.c_units_combo.set_active(0)
self.copy_to_clipboard_btn = gtk.Button(stock=gtk.STOCK_COPY)
self.copy_to_clipboard_btn.connect("clicked", self._copy_to_clipboard_cb)
table = gtk.Table(6,2)
table.set_row_spacings(3)
table.set_col_spacings(3)
table.attach(create_label(_("Light path (cm):")),
0,1,0,1, gtk.FILL, gtk.EXPAND|gtk.FILL)
table.attach(self.LP_spinner,
1,2,0,1, gtk.EXPAND|gtk.FILL, gtk.EXPAND|gtk.FILL)
table.attach(self.c_units_combo,
0,2,1,2, gtk.EXPAND|gtk.FILL, gtk.EXPAND|gtk.FILL)
table.attach(self.C_label,
0,1,2,3, gtk.FILL, gtk.EXPAND|gtk.FILL)
table.attach(self.C_spinner,
1,2,2,3, gtk.EXPAND|gtk.FILL, gtk.EXPAND|gtk.FILL)
table.attach(create_label(_("Molecular weight (g/mol):")),
0,1,3,4, gtk.FILL, gtk.EXPAND|gtk.FILL)
table.attach(self.MW_spinner,
1,2,3,4, gtk.EXPAND|gtk.FILL, gtk.EXPAND|gtk.FILL)
table.attach(create_label(_("Residue number:")),
0,1,4,5, gtk.FILL, gtk.EXPAND|gtk.FILL)
table.attach(self.Rn_spinner,
1,2,4,5, gtk.EXPAND|gtk.FILL, gtk.EXPAND|gtk.FILL)
table.attach(create_label(_("<b>Correction factor:</b>")),
0,1,5,6, gtk.FILL, gtk.EXPAND|gtk.FILL,0,5)
table.attach(self.factor_entry,
1,2,5,6, gtk.EXPAND|gtk.FILL, gtk.EXPAND|gtk.FILL,0,5)
self.vbox.pack_start(table, False, False, 4)
self.action_area.pack_end(self.copy_to_clipboard_btn, False, False, 0)
self.set_border_width(2)
self.show_all()
if __name__=="__main__":
w = CDCorrectionFactorCalculator()
w.run()
| gpl-2.0 | 4,273,316,415,626,022,400 | 38.619048 | 89 | 0.581197 | false |
NewForester/apl-py | test/monadic.py | 1 | 11954 | #!/usr/bin/python3
"""
doctest style unit tests for APL monadic functions
WIP - grows as more monadic functions are implemented.
The tests in this module exercise monadic functions with
numeric scalar and vector arguments only.
Other cases are covered in other test modules.
Each test passes an APL expression to the evaluate function.
Both positive and negative (e.g. DOMAIN ERROR) cases are tested.
Note:
    testMonadic --eager     # run with eager evaluation
    testMonadic --lazy      # run with lazy evaluation
"""
from test.base import preamble, testResult as test
from test.base import saveIndexOrigin, setIndexOrigin, restoreIndexOrigin
# ------------------------------
def monadic():
"""
>>> test(r"⌹ 1")
FUNCTION NOT YET IMPLEMENTED
>>> test(r"¯ 1")
INVALID TOKEN
"""
pass
# ------------------------------
def conjugate_plus():
"""
>>> test(r"+ 0")
0
>>> test(r"+ 0 0.5 1 2")
0 0.5 1 2
>>> test(r"+ ¯0 ¯0.5 ¯1 ¯2")
0 ¯0.5 ¯1 ¯2
"""
pass
# --------------
def negate_minus():
"""
>>> test(r"- 0")
0
>>> test(r"- 0 0.5 1 2")
0 ¯0.5 ¯1 ¯2
>>> test(r"- ¯0 ¯0.5 ¯1 ¯2")
0 0.5 1 2
"""
pass
# --------------
def direction_times():
"""
>>> test(r"× 0")
0
>>> test(r"× 0 0.5 1 2")
0 1 1 1
>>> test(r"× ¯0 ¯0.5 ¯1 ¯2")
0 ¯1 ¯1 ¯1
"""
pass
# --------------
def reciprocal_divide():
"""
>>> test(r"÷ 0")
DOMAIN ERROR
>>> test(r"÷ 1")
1
>>> test(r"÷ 0.25 0.5 1 2")
4 2 1 0.5
>>> test(r"÷ ¯0.25 ¯0.5 ¯1 ¯2")
¯4 ¯2 ¯1 ¯0.5
"""
pass
# --------------
def ceil_maximum():
"""
>>> test(r"⌈ 0")
0
>>> test(r"⌈ 0.25 0.5 1.1 9.9")
1 1 2 10
>>> test(r"⌈ ¯0.25 ¯0.5 ¯1.1 ¯9.9")
0 0 ¯1 ¯9
"""
pass
# --------------
def floor_minimum():
"""
>>> test(r"⌊ 0")
0
>>> test(r"⌊ 0.25 0.5 1.1 9.9")
0 0 1 9
>>> test(r"⌊ ¯0.25 ¯0.5 ¯1.1 ¯9.9")
¯1 ¯1 ¯2 ¯10
"""
pass
# --------------
def magnitude_residue():
"""
>>> test(r"| 0")
0
>>> test(r"| 0.25 0.5 1.1 9.9")
0.25 0.5 1.1 9.9
>>> test(r"| ¯0.25 ¯0.5 ¯1.1 ¯9.9")
0.25 0.5 1.1 9.9
"""
pass
# ------------------------------
def exponential_power():
"""
>>> test(r"* 1 0 ¯1")
2.718281828 1 0.3678794412
>>> test(r"* 1 2 3 4 5")
2.718281828 7.389056099 20.08553692 54.59815003 148.4131591
>>> test(r"* ¯1 ¯2 ¯3 ¯4 ¯5")
0.3678794412 0.1353352832 0.04978706837 0.01831563889 0.006737946999
>>> test(r"* 0.125 0.25 0.5")
1.133148453 1.284025417 1.648721271
"""
pass
# --------------
def logarithm():
"""
>>> test(r"⍟ 0")
DOMAIN ERROR
>>> test(r"⍟ -1")
DOMAIN ERROR
>>> test(r"⍟ 2.718281828459045 1.0 0.36787944117144233")
1 0 ¯1
>>> test(r"⍟ 1 2 3 4 5")
0 0.6931471806 1.098612289 1.386294361 1.609437912
>>> test(r"⍟ 0.125 0.25 0.5")
¯2.079441542 ¯1.386294361 ¯0.6931471806
"""
pass
# --------------
def factorial_binomial():
"""
>>> test(r"! -1")
DOMAIN ERROR
>>> test(r"! 0")
1
>>> test(r"? 1")
1
>>> test(r"? ,1")
1
>>> test(r"⍴ ? 1")
⍬
>>> test(r"⍴ ? ,1")
1
>>> test(r"! 1 2 3 4 5")
1 2 6 24 120
>>> test(r"! 0.125 0.25 0.5")
0.9417426998 0.9064024771 0.8862269255
"""
pass
# --------------
def roll_deal():
"""
randomness makes positive testing a little tricky
>>> test(r"? 0")
DOMAIN ERROR
>>> test(r"? -1")
DOMAIN ERROR
>>> test(r"? 1")
1
>>> test(r"? 1÷2")
DOMAIN ERROR
>>> test(r"! 1")
1
>>> test(r"! ,1")
1
>>> test(r"⍴ ! 1")
⍬
>>> test(r"⍴ ! ,1")
1
>>> test(r"⍴ ? ⍳ 6")
6
"""
pass
# ------------------------------
def pi_circular():
"""
>>> test(r"○ 1")
3.141592654
>>> test(r"○ ¯1 0 1")
¯3.141592654 0 3.141592654
>>> test(r"○ ¯0.5 0.5")
¯1.570796327 1.570796327
>>> test(r"○ ¯2 2")
¯6.283185307 6.283185307
"""
pass
# ------------------------------
def tilde():
"""
>>> test(r"~ 1")
0
>>> test(r"~ 0")
1
>>> test(r"⍴ ~ 1")
⍬
>>> test(r"⍴ ~ ,1")
1
>>> test(r"~ ¯1")
DOMAIN ERROR
>>> test(r"~ 0.5")
DOMAIN ERROR
>>> test(r"~ 0 1 0 ")
1 0 1
"""
pass
# --------------
def or_gcd():
"""
>>> test(r"∨ 1")
VALENCE ERROR
"""
pass
# --------------
def and_lcm():
"""
>>> test(r"∧ 1")
VALENCE ERROR
"""
pass
# --------------
def nor():
"""
>>> test(r"⍱ 1")
VALENCE ERROR
"""
pass
# --------------
def nand():
"""
>>> test(r"⍲ 1")
VALENCE ERROR
"""
pass
# ------------------------------
def lt():
"""
>>> test(r"< 1")
VALENCE ERROR
"""
pass
# --------------
def le():
"""
>>> test(r"≤ 1")
VALENCE ERROR
"""
pass
# --------------
def ge():
"""
>>> test(r"≥ 1")
VALENCE ERROR
"""
pass
# --------------
def gt():
"""
>>> test(r"> 1")
VALENCE ERROR
"""
pass
# --------------
def eq():
"""
>>> test(r"= 1")
VALENCE ERROR
"""
pass
# --------------
def ne():
"""
>>> test(r"≠ 1")
VALENCE ERROR
"""
pass
# ------------------------------
def depth_match():
"""
depth function
>>> test(r"≡ 1.2")
0
>>> test(r"≡ 1 2 3")
1
"""
pass
# --------------
def tally_notMatch():
"""
tally function
>>> test(r"≢ 1.2")
1
>>> test(r"≢ 1 2 3")
3
"""
pass
# ------------------------------
def iota():
"""
>>> IO = saveIndexOrigin()
>>> setIndexOrigin(1)
>>> test(r"⍳ -1")
DOMAIN ERROR
>>> test(r"⍳ 0")
⍬
>>> test(r"⍳ 1")
1
>>> test(r"⍳ 2")
1 2
>>> test(r"⍳ 3.142")
DOMAIN ERROR
>>> test(r"⍳ 1 1")
WIP - LENGTH ERROR
>>> setIndexOrigin(0)
>>> test(r"⍳ -1")
DOMAIN ERROR
>>> test(r"⍳ 0")
⍬
>>> test(r"⍳ 1")
0
>>> test(r"⍳ 2")
0 1
>>> test(r"⍳ 3.142")
DOMAIN ERROR
>>> test(r"⍳ 1 1")
WIP - LENGTH ERROR
>>> restoreIndexOrigin(IO)
"""
pass
# --------------
def rho():
"""
>>> test(r"⍴ 1.2")
⍬
>>> test(r"⍴ ,1.2")
1
>>> test(r"⍴ 1 2 3")
3
"""
pass
# --------------
def comma():
"""
>>> test(r", 1.2")
1.2
>>> test(r"⍴ , 1.2")
1
>>> test(r", 1 2 3")
1 2 3
>>> test(r"⍴ , 1 2 3")
3
"""
pass
# --------------
def enlist_membership():
"""
>>> test(r"∊ 1.2")
1.2
>>> test(r"≢ ∊ 1.2")
1
>>> test(r"≡ ∊ 1.2")
1
>>> test(r"∊ 1 2 3")
1 2 3
>>> test(r"≢ ∊ 1 2 3")
3
>>> test(r"≡ ∊ 1 2 3")
1
"""
pass
# --------------
def find():
"""
>>> test(r"⍷ 1")
VALENCE ERROR
"""
pass
# --------------
def transpose():
"""
>>> test(r"⍉ 1.2")
1.2
>>> test(r"⍴ ⍉ 1.2")
⍬
>>> test(r"⍴ ⍉ ,1.2")
1
>>> test(r"⍉ 1 2 3")
1 2 3
"""
pass
# --------------
def enclose_partition():
"""
>>> test(r"⊂ 1.2")
1.2
>>> test(r"≡ ⊂ 1.2")
0
>>> test(r"≢ ⊂ 1.2")
1
>>> test(r"⊂ ,1.2")
(1.2)
>>> test(r"≡ ⊂ ,1.2")
2
>>> test(r"≢ ⊂ ,1.2")
1
>>> test(r"⊂ 1 2 3")
(1 2 3)
>>> test(r"≡ ⊂ 1 2 3")
2
>>> test(r"≢ ⊂ 1 2 3")
1
"""
pass
# --------------
def disclose_pick():
"""
>>> test(r"⊃ 1.2")
1.2
>>> test(r"⊃ ,1.2")
1.2
>>> test(r"⊃ 1 2 3")
1 2 3
>>> test(r"⍴ ⊃ 1.2")
⍬
>>> test(r"⍴ ⊃ ,1.2")
1
>>> test(r"⍴ ⊃ 1 2 3")
3
>>> test(r"(1.2) ≡ ⊃⊂ 1.2")
1
>>> test(r"(,1.2 )≡ ⊃⊂ ,1.2")
1
>>> test(r"(1 2 3) ≡ ⊃⊂ 1 2 3")
1
"""
pass
# --------------
def reverse_rotate():
"""
>>> test(r"⌽ 1.2")
1.2
>>> test(r"⍴ ⌽ 1.2")
⍬
>>> test(r"⍴ ⌽ ,1.2")
1
>>> test(r"⌽ 1 2 3")
3 2 1
"""
pass
# --------------
def unique_union():
"""
>>> test(r"∪ 0")
0
>>> test(r"∪ 0.5 1.5 0.5 1.5 0.5 1.5 ¯0.5 ¯1.5")
0.5 1.5 ¯0.5 ¯1.5
>>> test(r"⍴ ∪ 0.5 1.5 0.5 1.5 0.5 1.5 ¯0.5 ¯1.5")
4
"""
pass
# --------------
def intersection():
"""
>>> test(r"∩ 1")
VALENCE ERROR
"""
pass
# --------------
def tail_drop():
"""
>>> test(r"↓ 1 2 3")
2 3
>>> test(r"↓ 0.1 0.2 0.3")
0.2 0.3
>>> test(r"↓ ¯1 ¯2 ¯3")
¯2 ¯3
"""
pass
# --------------
def head_take():
"""
>>> test(r"↑ 1")
1
>>> test(r"↑ ,1")
1
>>> test(r"⍴ ↑ 1")
⍬
>>> test(r"⍴ ↑ ,1")
⍬
>>> test(r"↑ 1 2 3")
1
>>> test(r"↑ 0.1 0.2 0.3")
0.1
>>> test(r"↑ ¯1 ¯2 ¯3")
¯1
"""
pass
# --------------
def compress_replicate():
"""
>>> test(r"/ 1")
SYNTAX ERROR
>>> test(r"⌿ 1")
SYNTAX ERROR
"""
pass
# --------------
def expand():
"""
>>> test(r"\\ 1")
SYNTAX ERROR
>>> test(r"⍀ 1")
SYNTAX ERROR
"""
pass
# ------------------------------
def encode():
"""
>>> test(r"⊤ 1")
VALENCE ERROR
"""
pass
# --------------
def decode():
"""
>>> test(r"⊥ 1")
VALENCE ERROR
"""
pass
# --------------
def gradeUp():
"""
>>> IO = saveIndexOrigin()
>>> setIndexOrigin(1)
>>> test(r"⍋ 1 2 3 4 5 6")
1 2 3 4 5 6
>>> test(r"⍋ 6 5 4 3 2 1")
6 5 4 3 2 1
>>> test(r"⍋ 1 6 2 5 3 4")
1 3 5 6 4 2
>>> test(r"⍋ 1.1 6.6 2.2 5.5 3.3 4.4")
1 3 5 6 4 2
>>> test(r"⍋ ¯1 ¯6 ¯2 ¯5 ¯3 ¯4")
2 4 6 5 3 1
>>> test(r"⍋ 1")
DOMAIN ERROR
>>> test(r"⍋ ,1")
1
>>> setIndexOrigin(0)
>>> test(r"⍋ 1 2 3 4 5 6")
0 1 2 3 4 5
>>> test(r"⍋ 6 5 4 3 2 1")
5 4 3 2 1 0
>>> test(r"⍋ 1 6 2 5 3 4")
0 2 4 5 3 1
>>> test(r"⍋ 1.1 6.6 2.2 5.5 3.3 4.4")
0 2 4 5 3 1
>>> test(r"⍋ ¯1 ¯6 ¯2 ¯5 ¯3 ¯4")
1 3 5 4 2 0
>>> test(r"⍋ 1")
DOMAIN ERROR
>>> test(r"⍋ ,1")
0
>>> restoreIndexOrigin(IO)
"""
pass
# --------------
def gradeDown():
"""
>>> IO = saveIndexOrigin()
>>> setIndexOrigin(1)
>>> test(r"⍒ 1 2 3 4 5 6")
6 5 4 3 2 1
>>> test(r"⍒ 6 5 4 3 2 1")
1 2 3 4 5 6
>>> test(r"⍒ 1 6 2 5 3 4")
2 4 6 5 3 1
>>> test(r"⍒ 1.1 6.6 2.2 5.5 3.3 4.4")
2 4 6 5 3 1
>>> test(r"⍒ ¯1 ¯6 ¯2 ¯5 ¯3 ¯4")
1 3 5 6 4 2
>>> test(r"⍒ 1")
DOMAIN ERROR
>>> test(r"⍒ ,1")
1
>>> setIndexOrigin(0)
>>> test(r"⍒ 1 2 3 4 5 6")
5 4 3 2 1 0
>>> test(r"⍒ 6 5 4 3 2 1")
0 1 2 3 4 5
>>> test(r"⍒ 1 6 2 5 3 4")
1 3 5 4 2 0
>>> test(r"⍒ 1.1 6.6 2.2 5.5 3.3 4.4")
1 3 5 4 2 0
>>> test(r"⍒ ¯1 ¯6 ¯2 ¯5 ¯3 ¯4")
0 2 4 5 3 1
>>> test(r"⍒ 1")
DOMAIN ERROR
>>> test(r"⍒ ,1")
0
>>> restoreIndexOrigin(IO)
"""
pass
# ------------------------------
if __name__ == "__main__":
preamble()
if test and __name__:
import doctest
doctest.testmod()
else:
IO = saveIndexOrigin()
setIndexOrigin(0)
restoreIndexOrigin(IO)
# EOF
| apache-2.0 | -8,556,475,950,720,416,000 | 13.789203 | 73 | 0.363723 | false |
cropleyb/pentai | pentai/db/ai_factory.py | 1 | 2712 | #!/usr/bin/env python
import pentai.ai.ai_player as aip_m
from pentai.ai.priority_filter import *
from pentai.ai.priority_filter_2 import *
import pentai.db.openings_book as ob_m
import pentai.db.games_mgr
from pentai.ai.ai_genome import *
class AIFactory: # TODO: These are just functions
def create_player(self, genome):
filter_num = genome.filter_num
if filter_num == 1:
sf = PriorityFilter()
elif filter_num == 2:
sf = PriorityFilter2()
sf.set_max_moves_per_depth_level(mmpdl=genome.mmpdl, narrowing=genome.narrowing,
chokes=genome.chokes)
try:
vision = genome.vision
except AttributeError:
vision = 100
sf.set_vision(vision)
try:
p_name = genome.p_name
except AttributeError:
p_name = genome.name
p = aip_m.AIPlayer(sf, p_name=p_name)
try:
p_key = genome.p_key
except AttributeError:
p_key = genome.key
p.p_key = p_key
try:
p.bl_cutoff = genome.bl_cutoff
except AttributeError:
p.bl_cutoff = False
ob = ob_m.instance
if not ob:
ob = ob_m.OpeningsBook()
ob_m.instance = ob
p.set_use_openings_book(ob)
p.force_depth = genome.force_depth
p.set_max_depth(genome.max_depth + genome.max_depth_boost)
self.set_utility_config(genome, p)
p.genome = genome
return p
def set_utility_config(self, genome, player):
uc = player.get_utility_calculator()
uc.capture_score_base = genome.capture_score_base
uc.take_score_base = genome.take_score_base
uc.threat_score_base = genome.threat_score_base
uc.captures_scale = genome.captures_scale
uc.move_factor = genome.move_factor
uc.length_factor = genome.length_factor # TODO: Cull
uc.use_net_captures = genome.use_net_captures
uc.length_scale = genome.length_scale
uc.scale_pob = genome.scale_pob
uc.calc_mode = genome.calc_mode
try:
uc.enclosed_four_base = genome.enclosed_four_base
except:
uc.enclosed_four_base = genome.enclosed_four_base = 400
uc.judgement = genome.judgement
try:
uc.checkerboard_value = genome.checkerboard_value
except:
uc.checkerboard_value = 0
'''
# Example of how to handle new fields:
try:
uc.length_scale = genome.length_scale
except:
uc.length_scale = genome.length_scale = [1,1,1,1,1,1]
# Then run upgrade_dbs.py
'''
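# Example usage (illustrative; a genome is produced elsewhere in this package,
# e.g. via AIGenome):
#   player = AIFactory().create_player(genome)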
| mit | -9,205,106,758,263,038,000 | 27.547368 | 88 | 0.581121 | false |
atbentley/plank | setup.py | 1 | 1311 | import os
import re
from setuptools import setup as setup
def read(path):
with open(os.path.join(os.path.dirname(__file__), path), 'r') as f:
data = f.read()
return data.strip()
def get_version():
_version_re = re.compile(r'\s*__version__\s*=\s*\'(.*)\'\s*')
return _version_re.findall(read(os.path.join('plank', '__init__.py')))[0]
install_requires = read('requirements.txt').split('\n')
test_requires = read('build-requirements.txt').split('\n')
test_requires.extend(install_requires)
setup(
name='plank',
version=get_version(),
url='http://github.com/atbentley/plank/',
license='MIT',
author='Andrew Bentley',
author_email='andrew.t.bentley@gmail.com',
description="A simple task and build runner that doesn't get in the way.",
long_description=read('README.rst'),
packages=['plank'],
entry_points={'console_scripts': ['plank = plank.cli:main']},
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=install_requires,
tests_require=test_requires,
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5'
]
)
| mit | 8,118,092,918,294,471,000 | 28.133333 | 78 | 0.633105 | false |
d33tah/macd | macd/views.py | 1 | 3323 | import datetime
import subprocess
from macd.models import SeenEvent, Device
from django.shortcuts import render
from django.utils import timezone
def index(request):
now = timezone.now()
time_threshold = now - datetime.timedelta(minutes=10)
items = SeenEvent.objects.filter(date__gte=time_threshold)
devices_set = set(item.mac.device for item in items
if not item.mac.device.ignored)
devices = []
two_minutes = now - datetime.timedelta(minutes=2)
macdb = open("/usr/share/nmap/nmap-mac-prefixes").readlines()
for device in devices_set:
found_2min = False
earliest_since = None
macs = device.mac_set.all()
items_for_mac = SeenEvent.objects.filter(mac__in=macs)[:10000]
if len(items_for_mac) > 0:
for i in range(1, len(items_for_mac)):
curr, previous = items_for_mac[i].date, items_for_mac[i-1].date
difference = previous - curr
if earliest_since is None or previous < earliest_since:
earliest_since = previous
if difference > datetime.timedelta(minutes=10):
break
if items_for_mac[0].date > two_minutes:
found_2min = True
name = str(device)
vendor = ''
if name.endswith(" (?)") and len(name) == 21:
mac_name = name.upper().replace(":","")[:6]
vendor = [" ".join(i.split()[1:])
for i in macdb
if i.split()[0] == mac_name][0]
devices += [{
'leaving': found_2min,
'name': name,
'vendor': vendor,
'since': timezone.localtime(earliest_since) if earliest_since else ''
}]
last_event_time = SeenEvent.objects.latest('date').date
    viewer_ip = request.META['REMOTE_ADDR']
    viewer_mac = ''
    viewer_mac_unknown = False
if (viewer_ip.startswith('192.168.') or
viewer_ip.startswith('172.16.') or
viewer_ip.startswith('10.')):
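        # `arp -n` output looks like this (illustrative):
        #   Address        HWtype  HWaddress          Flags Mask  Iface
        #   192.168.88.5   ether   aa:bb:cc:dd:ee:ff  C           eth0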
arp_output = subprocess.check_output(['/usr/sbin/arp', '-n'])
arp_data_lines = [i for i in arp_output.split("\n")[1:] if i!='']
arp_macs = {cols[0]: cols[2]
for line in arp_data_lines
for cols in [line.split()]}
viewer_mac = arp_macs.get(viewer_ip, '')
viewer_mac_unknown = list(Device.objects.filter(description='',
mac=viewer_mac))
return render(request, 'macd/index.html', {
'devices': devices,
'last_event': timezone.localtime(last_event_time),
'viewer_mac': viewer_mac if viewer_mac_unknown else None,
})
def unknown(request):
macs = [m for d in Device.objects.filter(description='')
for m in d.mac_set.all()]
devices_dict = {mac: len(SeenEvent.objects.filter(mac=mac))
for mac in macs}
devices = ["%s: %s" % (k, v)
for k, v in reversed(sorted(devices_dict.items(),
key=lambda x: x[1]))
]
    last_event_time = SeenEvent.objects.latest('date').date
    return render(request, 'macd/index.html', {
'devices': devices,
'last_event': timezone.localtime(last_event_time)
})
| gpl-3.0 | -2,758,185,787,429,000,700 | 37.195402 | 81 | 0.547397 | false |
Esri/raster-functions | functions/BlockStatistics.py | 1 | 4494 | import numpy as np
from skimage.transform import resize
from skimage.util import view_as_blocks
class BlockStatistics():
def __init__(self):
self.name = "Block Statistics Function"
self.description = ("Generates a downsampled output raster by computing a statistical "
"measure over non-overlapping square blocks of pixels in the input raster.")
self.func = np.mean
self.padding = 0
def getParameterInfo(self):
return [
{
'name': 'raster',
'dataType': 'raster',
'value': None,
'required': True,
'displayName': "Input Raster",
'description': "The primary input raster over which block statistics is computed."
},
{
'name': 'size',
'dataType': 'numeric',
'value': 1,
'required': False,
'displayName': "Block Size",
'description': ("The number of pixels along each side of the square "
"non-overlapping block.")
},
{
'name': 'measure',
'dataType': 'string',
'value': 'Mean',
'required': False,
'displayName': "Measure",
'domain': ('Minimum', 'Maximum', 'Mean', 'Median', 'Sum', 'Nearest'),
'description': ("The statistical measure computed over each "
"block of pixels in the input raster.")
},
{
'name': 'factor',
'dataType': 'numeric',
'value': 1,
'required': False,
'displayName': "Downsampling Factor",
'description': ("The integer factor by which the output raster is "
"downsampled relative to the input raster.")
},
]
def getConfiguration(self, **scalars):
s = scalars.get('size', None)
s = 3 if s is None else s
self.padding = int(s / 2)
return {
'samplingFactor': scalars.get('size', 1.0),
'inheritProperties': 4 | 8, # inherit everything but the pixel type (1) and NoData (2)
'invalidateProperties': 2 | 4 | 8, # invalidate histogram, statistics, and key metadata
'inputMask': True,
'resampling': False,
'padding': self.padding,
}
def updateRasterInfo(self, **kwargs):
f = kwargs.get('factor', 1.0)
kwargs['output_info']['cellSize'] = tuple(np.multiply(kwargs['raster_info']['cellSize'], f))
kwargs['output_info']['pixelType'] = 'f4' # output pixels values are floating-point
kwargs['output_info']['statistics'] = ()
kwargs['output_info']['histogram'] = ()
m = kwargs.get('measure')
m = m.lower() if m is not None and len(m) else 'mean'
if m == 'minimum':
self.func = np.min
elif m == 'maximum':
self.func = np.max
elif m == 'mean':
self.func = np.mean
elif m == 'median':
self.func = np.median
elif m == 'sum':
self.func = np.sum
elif m == 'nearest':
self.func = None
return kwargs
def updatePixels(self, tlc, shape, props, **pixelBlocks):
p = pixelBlocks['raster_pixels']
m = pixelBlocks['raster_mask']
if self.func is None:
b = resize(p, shape, order=0, preserve_range=True)
else:
            blockSizes = tuple(np.divide(p.shape, shape).astype(int))  # view_as_blocks needs integer block sizes
b = np.ma.masked_array(view_as_blocks(p, blockSizes),
view_as_blocks(~m.astype('b1'), blockSizes))
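            # e.g. a (400, 400) pixel block with blockSizes (2, 2) becomes a
            # (200, 200, 2, 2) view; the mask keeps NoData pixels out of the
            # statistic computed below (illustrative shapes).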
for i in range(len(b.shape) // 2):
b = self.func(b, axis=-1)
b = b.data
pixelBlocks['output_pixels'] = b.astype(props['pixelType'], copy=False)
pixelBlocks['output_mask'] = resize(m, shape, order=0, preserve_range=True).astype('u1', copy=False)
return pixelBlocks
def updateKeyMetadata(self, names, bandIndex, **keyMetadata):
if bandIndex == -1:
keyMetadata['datatype'] = 'Processed'
return keyMetadata
| apache-2.0 | -8,694,753,335,761,033,000 | 37.078261 | 110 | 0.491322 | false |
arbor-jjones/idataco | idataco/widgets/calls.py | 1 | 19598 | #!/usr/bin/python
########################################################################
# Copyright (c) 2015-2016
# Jason Jones <jason<at>jasonjon<dot>es>
# All rights reserved.
########################################################################
#
# This file is part of IDA TACO
#
# IDATACO is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
########################################################################
import re
import idc
import idataco.util.qt as qt
from . import TacoTabWidget
import logging
log = logging.getLogger(__name__)
class TacoCalls(TacoTabWidget):
name = "Cuckoo Calls"
short_name = "cuckoo_calls"
description = """ Display Win32 API calls logged by Cuckoo Sandbox and allow for filtering by value and category.
Also support annotating calls with metadata from the log """
_COLOR_MAP = {
"registry": qt.qcolor()(0xff, 0xc5, 0xc5),
"filesystem": qt.qcolor()(0xff, 0xe3, 0xc5),
"process": qt.qcolor()(0xc5, 0xe0, 0xff),
# "threading": qt.qcolor()(0xa,0xa,0xa),
"services": qt.qcolor()(0xcc, 0xc5, 0xff),
"device": qt.qcolor()(0xcc, 0xc5, 0xff),
"network": qt.qcolor()(0xd3, 0xff, 0xc5),
"synchronization": qt.qcolor()(0xf9, 0xc5, 0xff),
# "crypto": qt.qcolor()(0x9,0x9,0x9),
"browser": qt.qcolor()(0xdf, 0xff, 0xdf),
}
def initVars(self):
self._call_table = qt.qtablewidget()()
self._call_table.setEditTriggers(qt.qabstractitemview().NoEditTriggers)
self._call_table.setRowCount(0)
self._call_table.setColumnCount(7)
self._call_table.setHorizontalHeaderLabels(["Category",
"Caller",
"Parent Caller",
"Logged API",
"Called API",
"Return",
"Args"]
)
self._clipboard = qt.qclipboard()
self.setupTableContextMenu()
self._marked_up = set()
self._checkbox_map = {}
# call color picker setup
self._color_picker = qt.qcolordialog()()
self._color_picker.setCurrentColor(qt.qcolor()(0xff, 165, 0x0))
self._color_picker.blockSignals(True)
self._color_picker.currentColorChanged.connect(self.chooseColor)
self._color_picker.blockSignals(False)
self._color_button = qt.qpushbutton()("")
self._color_button.setStyleSheet("font-size:15px;background-color:#ffa500; border: 2px solid #222222")
self._color_button.setFixedSize(15, 15)
self._color_button.clicked.connect(self._color_picker.open)
# func color picker setup
self._func_color_picker = qt.qcolordialog()()
self._func_color_picker.setCurrentColor(qt.qcolor()(0xff, 0xff, 0xff))
self._func_color_picker.blockSignals(True)
self._func_color_picker.currentColorChanged.connect(self.chooseFuncColor)
self._func_color_picker.blockSignals(False)
self._func_color_button = qt.qpushbutton()("")
self._func_color_button.setStyleSheet("font-size:15px;background-color:#ffffff; border: 2px solid #222222")
self._func_color_button.setFixedSize(15, 15)
self._func_color_button.clicked.connect(self._func_color_picker.open)
def initLayout(self):
call_table_layout = qt.qvboxlayout()()
filter_layout = qt.qhboxlayout()()
markup_layout = qt.qhboxlayout()()
markup_layout.setAlignment(qt.qtcore().Qt.AlignLeft)
markup_layout.addWidget(qt.qlabel()("Choose Color: "))
markup_layout.addWidget(self._color_button)
markup_layout.addWidget(qt.qlabel()("Choose Function Color: "))
markup_layout.addWidget(self._func_color_button)
markup_layout.addWidget(qt.qlabel()("\t\t\tMarkup: "))
markup_category_button = qt.qpushbutton()("Selected")
markup_category_button.clicked.connect(self.markupCategories)
markup_all_button = qt.qpushbutton()("All")
markup_all_button.clicked.connect(self.markupAll)
markup_remove_button = qt.qpushbutton()("Remove")
markup_remove_button.clicked.connect(self.removeAllMarkup)
markup_layout.addWidget(markup_category_button)
markup_layout.addWidget(markup_all_button)
markup_layout.addWidget(markup_remove_button)
call_table_layout.addLayout(markup_layout)
self._checkbox_layout = qt.qhboxlayout()()
self._checkbox_layout.setAlignment(qt.qtcore().Qt.AlignLeft)
self._checkbox_layout.addWidget(qt.qlabel()("Categories: "))
call_table_layout.addLayout(self._checkbox_layout)
self._filter_box = qt.qlineedit()()
self._filter_box.setMaxLength(80)
_filter_button = qt.qpushbutton()("Filter")
_filter_button.clicked.connect(self.filterCallData)
filter_layout.setAlignment(qt.qtcore().Qt.AlignLeft)
filter_layout.addWidget(qt.qlabel()("Select: "))
b_all = qt.qpushbutton()("All")
width = b_all.fontMetrics().boundingRect("All").width() + 9
b_all.setMaximumWidth(width)
b_all.clicked.connect(self.selectAll)
b_none = qt.qpushbutton()("None")
width = b_all.fontMetrics().boundingRect("None").width() + 9
b_none.setMaximumWidth(width)
b_none.clicked.connect(self.selectNone)
filter_layout.addWidget(b_all)
filter_layout.addWidget(b_none)
filter_layout.addWidget(qt.qlabel()("Filter Calls: "))
filter_layout.addWidget(self._filter_box)
filter_layout.addWidget(_filter_button)
call_table_layout.addLayout(filter_layout)
call_table_layout.addWidget(self._call_table)
self.setLayout(call_table_layout)
def setupTableContextMenu(self):
self._call_table.setContextMenuPolicy(qt.qtcore().Qt.ActionsContextMenu)
copyAction = qt.qaction()(self._call_table)
copyAction.setText("Copy Cell Value")
copyAction.triggered.connect(self.copyToClipboard)
self._call_table.addAction(copyAction)
markupAction = qt.qaction()(self._call_table)
markupAction.setText("Add Markup to Selected Call")
markupAction.triggered.connect(self.markUpItem)
self._call_table.addAction(markupAction)
unMarkupAction = qt.qaction()(self._call_table)
unMarkupAction.setText("Remove Markup from Selected Call")
unMarkupAction.triggered.connect(self.unMarkUpItem)
self._call_table.addAction(unMarkupAction)
log.debug("Creating Calls Tab")
def filterCallData(self):
unchecked = []
for cat, cb in self._checkbox_map.items():
if not cb.isChecked(): unchecked.append(cat)
log.debug(unchecked)
if self._filter_box.text():
filter_text = self._filter_box.text().lower()
log.debug(filter_text)
for i in range(self._call_table.rowCount()):
if self._call_table.item(i, 0).text() in unchecked:
self._call_table.setRowHidden(i, True)
elif filter_text not in self._call_table.item(i, 3).text().lower() and \
filter_text not in self._call_table.item(i, 4).text().lower() and \
filter_text not in self._call_table.item(i, 6).text().lower():
self._call_table.setRowHidden(i, True)
else:
self._call_table.setRowHidden(i, False)
else:
for i in range(self._call_table.rowCount()):
if self._call_table.item(i, 0).text() in unchecked:
self._call_table.setRowHidden(i, True)
else:
self._call_table.setRowHidden(i, False)
def load(self):
for cat in sorted(list(self.parent.call_categories)):
self._checkbox_map[cat] = qt.qcheckbox()(cat.capitalize())
for cat in sorted(self._checkbox_map.keys()):
cb = self._checkbox_map[cat]
cb.setCheckState(qt.qtcore().Qt.Checked)
cb.clicked.connect(self.filterCallData)
self._checkbox_layout.addWidget(cb)
self._call_table.clear()
self._call_table.setHorizontalHeaderLabels(["Category",
"Caller",
"Parent Caller",
"Logged API",
"Called API",
"Return",
"Args"]
)
header = self._call_table.horizontalHeader()
header.setStretchLastSection(True)
if self.parent.cuckoo_version.startswith(("1.3", "2.0")):
self._call_table.itemDoubleClicked.connect(self.clickRow)
self._call_table.setRowCount(len(self.parent.calls))
self._call_table.setWordWrap(True)
row = 0
for call in self.parent.calls:
called_api = ""
arg_str = "\r\n".join(
["{}: {}".format(k, unicode(v)[:80].encode("unicode-escape")) for k, v in call["arguments"].items()])
bg_color = self._COLOR_MAP.get(call.get("category", ""), qt.qcolor()(0xff, 0xff, 0xff))
self._call_table.setItem(row, 0, qt.qtablewidgetitem()(call.get("category", "")))
self._call_table.item(row, 0).setBackground(bg_color)
call_addr = ""
if self.parent.cuckoo_version.startswith("1.3"):
call_addr = idc.PrevHead(int(call["caller"], 16))
call_addr = call.get("caller", "0x00000000") if call_addr == idc.BADADDR else "0x{:08x}".format(call_addr)
# cuckoo 2.0 stores call stack in "stack", but only enabled in DEBUG
if self.parent.cuckoo_version.startswith("2.0") and call["stacktrace"]:
for ret_addr in call["stacktrace"]:
if ret_addr.count(" ") > 2:
called_api = ret_addr.split("+")[0]
else:
break
for ret_addr in call["stacktrace"]:
if ret_addr.count(" ") <= 2:
call_addr = int(ret_addr.split(" @ ")[-1], 16)
call_addr = idc.PrevHead(call_addr)
call_addr = call.get("caller", "0x00000000") if call_addr == idc.BADADDR else "0x{:08x}".format(call_addr)
break
ret = call["return"] if "return" in call else str(call["return_value"])
self._call_table.setItem(row, 1, qt.qtablewidgetitem()(call_addr))
self._call_table.item(row, 1).setBackground(bg_color)
self._call_table.setItem(row, 2, qt.qtablewidgetitem()(call.get("parentcaller", "")))
self._call_table.item(row, 2).setBackground(bg_color)
self._call_table.setItem(row, 3, qt.qtablewidgetitem()(call["api"]))
self._call_table.item(row, 3).setBackground(bg_color)
self._call_table.setItem(row, 4, qt.qtablewidgetitem()(called_api))
self._call_table.item(row, 4).setBackground(bg_color)
self._call_table.setItem(row, 5, qt.qtablewidgetitem()(ret))
self._call_table.item(row, 5).setBackground(bg_color)
self._call_table.setItem(row, 6, qt.qtablewidgetitem()(arg_str))
self._call_table.item(row, 6).setBackground(bg_color)
row += 1
self._call_table.setVisible(False)
#self._call_table.resizeRowsToContents()
self._call_table.resizeColumnsToContents()
self._call_table.setVisible(True)
self._call_table.setSortingEnabled(True)
def clickRow(self):
addr = int(self._call_table.item(self._call_table.currentRow(), 1).text(), 16)
if addr:
idc.Jump(addr)
def markUpItem(self):
markup_ea = int(self._call_table.item(self._call_table.currentRow(), 1).text(), 16)
self.markupEa(markup_ea)
self._marked_up.add(markup_ea)
if self.parent.cuckoo_version.startswith("1.3"):
markup_parent_ea = int(self._call_table.item(self._call_table.currentRow(), 2).text(), 16)
self.markupEa(markup_parent_ea, colorFunc=False)
self._marked_up.add(markup_parent_ea)
called_api_name = self._call_table.item(self._call_table.currentRow(), 4).text()
logged_api_name = self._call_table.item(self._call_table.currentRow(), 3).text()
args = self._call_table.item(self._call_table.currentRow(), 6).text()
self.addPosterior(markup_ea, logged_api_name, called_api_name, args)
def addPosterior(self, markup_ea, logged_api_name, called_api_name, args):
log.debug("Adding posterior lines")
idc.MakeComm(markup_ea, str(called_api_name))
idc.ExtLinB(markup_ea, 0, "api: {}".format(logged_api_name))
ln = 1
for arg in re.split("\r?\n", args.strip()):
idc.ExtLinB(markup_ea, ln, str(arg.strip()))
ln += 1
def markupEa(self, markup_ea, colorFunc=True):
if markup_ea and markup_ea != idc.BADADDR:
func_color = self._func_color_picker.currentColor()
ea_color = self._color_picker.currentColor()
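            # IDA's SetColor expects 0xBBGGRR, hence the [::-1] below to flip RGB to BGR.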
log.debug("Coloring instructions for 0x{:08x}".format(markup_ea))
idc.SetColor(markup_ea, idc.CIC_FUNC,
int("0x{:02x}{:02x}{:02x}".format(*func_color.getRgb()[:3][::-1]), 16))
if colorFunc:
idc.SetColor(markup_ea, idc.CIC_ITEM,
int("0x{:02x}{:02x}{:02x}".format(*ea_color.getRgb()[:3][::-1]), 16))
def unMarkUpItem(self):
markup_ea = None
try:
markup_ea = int(self._call_table.item(self._call_table.currentRow(), 1).text(), 16)
self.removeMarkup(markup_ea)
except ValueError:
pass
if markup_ea in self._marked_up:
self._marked_up.remove(markup_ea)
if markup_ea and self.parent.cuckoo_version.startswith("1.3"):
try:
markup_parent_ea = int(self._call_table.item(self._call_table.currentRow(), 2).text(), 16)
self.removeMarkup(markup_parent_ea)
if markup_parent_ea in self._marked_up:
self._marked_up.remove(markup_parent_ea)
except ValueError:
pass
def removeMarkup(self, ea, force=False):
if ea in self._marked_up or force:
log.debug("Removing color")
idc.SetColor(ea, idc.CIC_FUNC, 0xffffff)
idc.SetColor(ea, idc.CIC_ITEM, 0xffffff)
idc.MakeComm(ea, "")
log.debug("Removing posterior lines")
i = 0
while idc.LineB(ea, i):
idc.DelExtLnB(ea, i)
i += 1
def removeAllMarkup(self):
for i in range(self._call_table.rowCount()):
try:
markup_ea = int(self._call_table.item(i, 1).text(), 16)
self.removeMarkup(markup_ea, force=True)
except ValueError:
pass
self._marked_up = set()
def markupCategories(self):
checked = []
last_ea = idc.BADADDR
for cat, cb in self._checkbox_map.items():
if cb.isChecked():
checked.append(cat)
for i in range(self._call_table.rowCount()):
if self._call_table.item(i, 0).text() in checked:
markup_ea = int(self._call_table.item(i, 1).text(), 16)
if markup_ea and markup_ea != idc.BADADDR and markup_ea != last_ea and markup_ea not in self._marked_up:
last_ea = markup_ea
self.markupEa(markup_ea)
                    api_name = self._call_table.item(i, 3).text()
                    called_api = self._call_table.item(i, 4).text()
                    args = self._call_table.item(i, 6).text()
                    self.addPosterior(markup_ea, api_name, called_api, args)
self._marked_up.add(markup_ea)
if self.parent.cuckoo_version.startswith("1.3"):
try:
markup_parent_ea = int(self._call_table.item(i, 2).text(), 16)
self.markupEa(markup_parent_ea)
self._marked_up.add(markup_parent_ea)
except ValueError:
pass
def markupAll(self):
last_ea = idc.BADADDR
for i in range(self._call_table.rowCount()):
markup_ea = None
try:
markup_ea = int(self._call_table.item(i, 1).text(), 16)
except ValueError:
pass
if markup_ea and markup_ea != idc.BADADDR and markup_ea != last_ea and markup_ea not in self._marked_up:
self.markupEa(markup_ea)
self._marked_up.add(markup_ea)
                api_name = self._call_table.item(i, 3).text()
                called_api = self._call_table.item(i, 4).text()
                args = self._call_table.item(i, 6).text()
                self.addPosterior(markup_ea, api_name, called_api, args)
if self.parent.cuckoo_version.startswith("1.3"):
try:
markup_parent_ea = int(self._call_table.item(i, 2).text(), 16)
self.markupEa(markup_parent_ea, colorFunc=False)
self._marked_up.add(markup_parent_ea)
except ValueError:
pass
def copyToClipboard(self):
item = self._call_table.item(self._call_table.currentRow(), self._call_table.currentColumn())
self._clipboard.setText(item.text())
def selectAll(self):
for cat, cb in self._checkbox_map.iteritems():
cb.setCheckState(qt.qtcore().Qt.Checked)
self.filterCallData()
def selectNone(self):
for cat, cb in self._checkbox_map.iteritems():
cb.setCheckState(qt.qtcore().Qt.Unchecked)
self.filterCallData()
def chooseColor(self):
color = self._color_picker.currentColor()
self._color_button.setStyleSheet("font-size:40px;background-color:#{:02x}{:02x}{:02x};\
border: 2px solid #222222".format(color.red(), color.green(), color.blue()))
def chooseFuncColor(self):
color = self._func_color_picker.currentColor()
self._func_color_button.setStyleSheet("font-size:40px;background-color:#{:02x}{:02x}{:02x};\
border: 2px solid #222222".format(color.red(), color.green(), color.blue()))
def getTacoTab(self):
taco_tab = qt.qwidget()()
layout = qt.qhboxlayout()()
layout.addWidget(self)
taco_tab.setLayout(layout)
return taco_tab, self.name
| gpl-3.0 | -1,733,163,824,245,982,000 | 47.630273 | 130 | 0.566027 | false |
matematik7/CSSQC | cssqc/rules/singleLinePerProperty.py | 1 | 1052 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------
# cssqc/singleLinePerProperty.py
#
# Do not allow property on multiple lines.
# ----------------------------------------------------------------
# copyright (c) 2014 - Domen Ipavec
# Distributed under The MIT License, see LICENSE
# ----------------------------------------------------------------
from cssqc.qualityWarning import QualityWarning
from cssyacc import Whitespace
from cssqc.helpers import inspectWhitespaces
import re
def getHelp():
return """Do not allow property over multiple lines."""
class singleLinePerProperty:
def __init__(self, data):
pass
def on_Statement(self, s):
ln = inspectWhitespaces(s, lambda ws: '\n' not in ws.value)
if ln != -1 \
and not (type(s.text[-1]) is Whitespace \
and s.text[-1].lineno == ln):
return [QualityWarning('singleLinePerProperty', ln, 'Property over multiple lines.')]
else:
return [] | mit | -9,212,977,344,346,658,000 | 31.90625 | 97 | 0.526616 | false |
ardi69/pyload-0.4.10 | pyload/plugin/account/Keep2ShareCc.py | 1 | 2271 | # -*- coding: utf-8 -*-
import re
import time
from pyload.plugin.Account import Account
class Keep2ShareCc(Account):
__name = "Keep2ShareCc"
__type = "account"
__version = "0.05"
__description = """Keep2Share.cc account plugin"""
__license = "GPLv3"
__authors = [("aeronaut", "aeronaut@pianoguy.de"),
("Walter Purcaro", "vuolter@gmail.com")]
VALID_UNTIL_PATTERN = r'Premium expires:\s*<b>(.+?)<'
TRAFFIC_LEFT_PATTERN = r'Available traffic \(today\):\s*<b><a href="/user/statistic.html">(.+?)<'
LOGIN_FAIL_PATTERN = r'Please fix the following input errors'
def loadAccountInfo(self, user, req):
validuntil = None
trafficleft = -1
premium = False
html = req.load("http://keep2share.cc/site/profile.html", decode=True)
m = re.search(self.VALID_UNTIL_PATTERN, html)
if m:
expiredate = m.group(1).strip()
self.logDebug("Expire date: " + expiredate)
if expiredate == "LifeTime":
premium = True
validuntil = -1
else:
try:
validuntil = time.mktime(time.strptime(expiredate, "%Y.%m.%d"))
except Exception, e:
self.logError(e)
else:
premium = validuntil > time.mktime(time.gmtime())
m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
if m:
try:
trafficleft = self.parseTraffic(m.group(1))
except Exception, e:
self.logError(e)
return {'validuntil': validuntil, 'trafficleft': trafficleft, 'premium': premium}
def login(self, user, data, req):
req.cj.setCookie("keep2share.cc", "lang", "en")
html = req.load("http://keep2share.cc/login.html",
post={'LoginForm[username]' : user,
'LoginForm[password]' : data['password'],
'LoginForm[rememberMe]': 1,
'yt0' : ""},
decode=True)
if re.search(self.LOGIN_FAIL_PATTERN, html):
self.wrongPassword()
| gpl-3.0 | -3,576,954,663,591,912,000 | 30.109589 | 101 | 0.506385 | false |
Earthnuker/Universe | date_time.py | 1 | 2439 | from datetime import datetime
import datetime as DT
import time
import calendar
class Clock(object):
def __init__(self,offset=None):
self.timezone=None
if offset is not None:
self.timezone=DT.timezone(DT.timedelta(hours=offset))
def to_str(self,timestamp=None,with_orig=False):
if not timestamp:
timestamp=datetime.now(self.timezone)
if with_orig:
return timestamp,"{month_name} {day}, {year} {clock}".format(**self.as_dict(timestamp))
return "{month_name} {day}, {year} {clock}".format(**self.as_dict(timestamp))
def date(self,D=None):
if D is None:
D=datetime.now(self.timezone)
months=[
"Unesamber","Dutesamber","Trisesamber",
"Tetresamber","Pentesamber","Hexesamber",
"Sevesamber","Octesamber","Novesamber",
"Desamber","Undesamber","Dodesamber",
"Tridesamber","Year Day","Leap Day"
]
D=D.timetuple()
        yd=D.tm_yday-1 # zero-based day of year
        # 13 months of 28 days cover yd 0..363; the trailing day(s) are special.
        # Return dicts here too so as_dict()/to_str() keep working (the old
        # bare-string returns broke ret.update() in as_dict()).
        if calendar.isleap(D.tm_year) and yd==364:
            return {"month_name":"Leap Day","month":15,"day":1,"year":D.tm_year,
                    "date":"Leap Day, {0}".format(D.tm_year)}
        if yd==364+int(calendar.isleap(D.tm_year)):
            return {"month_name":"Year Day","month":14,"day":1,"year":D.tm_year,
                    "date":"Year Day, {0}".format(D.tm_year)}
        month=yd//28 # floor division also avoids the py2 int-division pitfall
        month_name=months[month]
        day=(yd%28)+1 # yd 0 is day 1 of Unesamber, not day 28
        ret={"month_name":month_name,"month":month+1,"day":day,"year":D.tm_year}
ret['date']="{month_name} {day}, {year}".format(**ret)
return ret
def time(self,D=None):
if D is None:
D=datetime.now(self.timezone)
        T=(D.time().microsecond/1000000.0+time.mktime(D.timetuple()))%(24*60*60)
        T="{:03.03f}".format((T/(24.0*60*60))*1000).zfill(7)
T=T.replace(".",":")
return {"clock":T,"above":T.split(":")[0],"below":T.split(":")[1]}
def as_dict(self,D=None):
if D is None:
D=datetime.now(self.timezone)
ret={'calendar':{
"day":D.day,
"month":D.month,
"year":D.year,
"time":D.time(),
"date":D.date(),
"hour":D.hour,
"minute":D.minute,
"second":D.second,
}}
ret.update(self.date(D))
ret.update(self.time(D))
ret['timestamp']="{month_name} {day}, {year} {clock}".format(**ret)
return ret
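# Example (illustrative): render "now" in this 13-month calendar with decimal
# time, producing something like "Trisesamber 12, 2024 512:345":
#   print(Clock(offset=0).to_str())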
Clock().time() | mit | 9,209,623,380,115,924,000 | 35.969697 | 99 | 0.522345 | false |
JASON0916/testrail-library | testrail_client/api/__init__.py | 1 | 1827 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from .case import Case
from .configurations import Config
from .milestone import MileStone
from .plan import Plan
from .project import Project
from .result import Result
from .run import Run
from .section import Section
from .suite import Suite
from .test import Test
from .user import User
class TestRailAPI(object):
__version__ = 'v2'
def __init__(self, url, user_name, password):
self.url = url
self.user_name = user_name
self.password = password
def __repr__(self):
return '<TestRail API>'
@property
def user(self):
return User(self.url, self.user_name, self.password)
@property
def case(self):
return Case(self.url, self.user_name, self.password)
@property
def config(self):
return Config(self.url, self.user_name, self.password)
@property
def milestone(self):
return MileStone(self.url, self.user_name, self.password)
@property
def plan(self):
return Plan(self.url, self.user_name, self.password)
@property
def project(self):
return Project(self.url, self.user_name, self.password)
@property
def result(self):
return Result(self.url, self.user_name, self.password)
@property
def run(self):
return Run(self.url, self.user_name, self.password)
@property
def section(self):
return Section(self.url, self.user_name, self.password)
@property
def suite(self):
return Suite(self.url, self.user_name, self.password)
@property
def test(self):
return Test(self.url, self.user_name, self.password)
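# Example usage (an illustrative sketch; the URL and credentials are
# placeholders, and available endpoint methods depend on the wrapped classes):
#   client = TestRailAPI('https://example.testrail.net', 'user@example.com', 'password')
#   projects = client.project # Project API wrapper bound to the same credentials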
| mit | -2,382,219,271,476,809,700 | 25.1 | 165 | 0.596606 | false |
nikha1/nyc-taxi | Tools/PostgresRDF/utils/postgresInterface.py | 1 | 3639 | from dateutil import parser
from rdflib import Graph, Literal, BNode, Namespace, RDF, XSD, URIRef
import queries
global SSN
global GEO
global DUL
# SSN Namespace
SSN = Namespace('https://www.w3.org/2005/Incubator/ssn/ssnx/ssn#')
# Geo Namespace
GEO = Namespace('http://www.w3.org/2003/01/geo/wgs84_pos#')
# DUL Namespace
DUL = Namespace('http://www.ontologydesignpatterns.org/ont/dul/DUL.owl#')
def addToGraph(event, graphURI = "http://example.com/g1", db_conf={"dbname" : "postgres", "user" : "postgres", "password" : "admin" }):
#configString = ("dbname=postgres user=waccess password=write")
configString = ("dbname=" + db_conf['dbname'] + " user="+ db_conf['user'] + " password=" + db_conf['password'])
#print configString
graph = Graph('PostgreSQL', identifier=URIRef(graphURI))
graph.open(configString, create=False)
graph.bind('ssn', SSN)
graph.bind('geo', GEO)
graph.bind('dul', DUL)
observation = BNode();
oTime = BNode();
# Observation
graph.add((observation, RDF.type, SSN.Observation))
graph.add((oTime, RDF.type, DUL.TimeInterval))
graph.add((observation, SSN.observationSamplingTime, oTime))
# Time
date = parser.parse(event['pickup_datetime'])
t = Literal(date.strftime("%Y-%m-%dT%H:%M:%S"), datatype=XSD.dateTime)
graph.add((oTime, DUL.hasRegionDataValue, t))
# SensorOutput
sensorOutput = BNode();
graph.add((sensorOutput, RDF.type, SSN.SensorOutput))
graph.add((observation, SSN.observationResult, sensorOutput))
# ObservationValue
observationValue = BNode()
startLocation = BNode()
endLocation = BNode()
graph.add((observationValue, RDF.type, SSN.ObservationValue))
graph.add((sensorOutput, SSN.hasValue, observationValue))
# Start and End Location
graph.add((observationValue, SSN.hasStartLocation, startLocation))
graph.add((observationValue, SSN.hasEndLocation, endLocation))
graph.add((startLocation, RDF.type, GEO.location))
graph.add((endLocation, RDF.type, GEO.location))
# Start Location
lat = Literal(event['pickup_latitude'], datatype=XSD.float)
long = Literal(event['pickup_longitude'], datatype=XSD.float)
# Adding the start location
graph.add((startLocation, GEO.lat, lat))
graph.add((startLocation, GEO.long, long))
# End Location
lat = Literal(event['dropoff_latitude'], datatype=XSD.float)
long = Literal(event['dropoff_longitude'], datatype=XSD.float)
# Adding the start location
graph.add((endLocation, GEO.lat, lat))
graph.add((endLocation, GEO.long, long))
#Duration
date1 = parser.parse(event['dropoff_datetime'])
date2 = parser.parse(event['pickup_datetime'])
dur = date1 - date2
duration = Literal(str(dur), datatype=XSD.float)
graph.add((observation, SSN.hasDuration, duration))
#print str(graph.__len__() / 11)
#Commit and close the graph
graph.commit()
graph.close()
def removeFromGraph(timestamp, graphURI = "http://example.com/g1", db_conf={"dbname" : "postgres", "user" : "postgres", "password" : "admin" }):
configString = ("dbname=postgres user=waccess password=write")
#configString = ("dbname=" + db_conf['dbname'] + "user="+ db_conf['user'] + " password=" + db_conf['password'])
graph = Graph('PostgreSQL', identifier=URIRef(graphURI))
graph.open(configString, create=False)
results = graph.query(queries.getEvents(timestamp))
print len(results)
for result in results:
for node in result:
graph.remove((node, None, None))
# Commit and close the graph
graph.commit()
graph.close()
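# Example usage (illustrative; the field names follow the NYC taxi trip schema
# that addToGraph() reads above):
#   event = {'pickup_datetime': '2015-01-01 00:11:00',
#            'dropoff_datetime': '2015-01-01 00:23:00',
#            'pickup_latitude': 40.75, 'pickup_longitude': -73.99,
#            'dropoff_latitude': 40.73, 'dropoff_longitude': -73.98}
#   addToGraph(event)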
| mit | 2,477,461,874,589,206,500 | 33.009346 | 144 | 0.676834 | false |
jiaphuan/models | research/lfads/lfads.py | 1 | 87185 | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ==============================================================================
"""
LFADS - Latent Factor Analysis via Dynamical Systems.
LFADS is an unsupervised method to decompose time series data into
various factors, such as an initial condition, a generative
dynamical system, control inputs to that generator, and a low
dimensional description of the observed data, called the factors.
Additionally, the observations have a noise model (in this case
Poisson), so a denoised version of the observations is also created
(e.g. underlying rates of a Poisson distribution given the observed
event counts).
The main data structure being passed around is a dataset. This is a dictionary
of data dictionaries.
DATASET: The top level dictionary is simply name (string -> dictionary).
The nested dictionary is the DATA DICTIONARY, which has the following keys:
'train_data' and 'valid_data', whose values are the corresponding training
and validation data with shape
ExTxD, E - # examples, T - # time steps, D - # dimensions in data.
The data dictionary also has a few more keys:
'train_ext_input' and 'valid_ext_input', if there are know external inputs
to the system being modeled, these take on dimensions:
ExTxI, E - # examples, T - # time steps, I = # dimensions in input.
'alignment_matrix_cxf' - If you are using multiple days data, it's possible
that one can align the channels (see manuscript). If so each dataset will
contain this matrix, which will be used for both the input adapter and the
output adapter for each dataset. These matrices, if provided, must be of
size [data_dim x factors] where data_dim is the number of neurons recorded
on that day, and factors is chosen and set through the '--factors' flag.
'alignment_bias_c' - See alignment_matrix_cxf. This bias will used to
the offset for the alignment transformation. It will *subtract* off the
bias from the data, so pca style inits can align factors across sessions.
If one runs LFADS on data where the true rates are known for some trials,
(say simulated, testing data, as in the example shipped with the paper), then
one can add three more fields for plotting purposes. These are 'train_truth'
and 'valid_truth', and 'conversion_factor'. These have the same dimensions as
'train_data', and 'valid_data' but represent the underlying rates of the
observations. Finally, if one needs to convert scale for plotting the true
underlying firing rates, there is the 'conversion_factor' key.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import os
import tensorflow as tf
from distributions import LearnableDiagonalGaussian, DiagonalGaussianFromInput
from distributions import diag_gaussian_log_likelihood
from distributions import KLCost_GaussianGaussian, Poisson
from distributions import LearnableAutoRegressive1Prior
from distributions import KLCost_GaussianGaussianProcessSampled
from utils import init_linear, linear, list_t_bxn_to_tensor_bxtxn, write_data
from utils import log_sum_exp, flatten
from plot_lfads import plot_lfads
class GRU(object):
"""Gated Recurrent Unit cell (cf. http://arxiv.org/abs/1406.1078).
"""
def __init__(self, num_units, forget_bias=1.0, weight_scale=1.0,
clip_value=np.inf, collections=None):
"""Create a GRU object.
Args:
num_units: Number of units in the GRU
forget_bias (optional): Hack to help learning.
weight_scale (optional): weights are scaled by ws/sqrt(#inputs), with
ws being the weight scale.
clip_value (optional): if the recurrent values grow above this value,
clip them.
      collections (optional): List of additional collections the variables
        should belong to.
"""
self._num_units = num_units
self._forget_bias = forget_bias
self._weight_scale = weight_scale
self._clip_value = clip_value
self._collections = collections
@property
def state_size(self):
return self._num_units
@property
def output_size(self):
return self._num_units
@property
def state_multiplier(self):
return 1
def output_from_state(self, state):
"""Return the output portion of the state."""
return state
def __call__(self, inputs, state, scope=None):
"""Gated recurrent unit (GRU) function.
Args:
inputs: A 2D batch x input_dim tensor of inputs.
state: The previous state from the last time step.
scope (optional): TF variable scope for defined GRU variables.
Returns:
A tuple (state, state), where state is the newly computed state at time t.
It is returned twice to respect an interface that works for LSTMs.
"""
x = inputs
h = state
if inputs is not None:
xh = tf.concat(axis=1, values=[x, h])
else:
xh = h
with tf.variable_scope(scope or type(self).__name__): # "GRU"
with tf.variable_scope("Gates"): # Reset gate and update gate.
# We start with bias of 1.0 to not reset and not update.
r, u = tf.split(axis=1, num_or_size_splits=2, value=linear(xh,
2 * self._num_units,
alpha=self._weight_scale,
name="xh_2_ru",
collections=self._collections))
r, u = tf.sigmoid(r), tf.sigmoid(u + self._forget_bias)
with tf.variable_scope("Candidate"):
xrh = tf.concat(axis=1, values=[x, r * h])
c = tf.tanh(linear(xrh, self._num_units, name="xrh_2_c",
collections=self._collections))
new_h = u * h + (1 - u) * c
new_h = tf.clip_by_value(new_h, -self._clip_value, self._clip_value)
return new_h, new_h
class GenGRU(object):
"""Gated Recurrent Unit cell (cf. http://arxiv.org/abs/1406.1078).
  This version is specialized for the generator, but isn't as fast, so
  we have two. Note this allows for l2 regularization on the recurrent
  weights, but also implicitly rescales the inputs (via the 1/sqrt(#inputs)
  scaling in the linear helper routine) so they have large magnitude when
  there are fewer inputs than recurrent units.
"""
def __init__(self, num_units, forget_bias=1.0,
input_weight_scale=1.0, rec_weight_scale=1.0, clip_value=np.inf,
input_collections=None, recurrent_collections=None):
"""Create a GRU object.
Args:
num_units: Number of units in the GRU
forget_bias (optional): Hack to help learning.
input_weight_scale (optional): weights are scaled ws/sqrt(#inputs), with
ws being the weight scale.
rec_weight_scale (optional): weights are scaled ws/sqrt(#inputs),
with ws being the weight scale.
clip_value (optional): if the recurrent values grow above this value,
clip them.
      input_collections (optional): List of additional collections that the
        input->rec weights should belong to.
      recurrent_collections (optional): List of additional collections that
        the rec->rec weights should belong to.
"""
self._num_units = num_units
self._forget_bias = forget_bias
self._input_weight_scale = input_weight_scale
self._rec_weight_scale = rec_weight_scale
self._clip_value = clip_value
self._input_collections = input_collections
self._rec_collections = recurrent_collections
@property
def state_size(self):
return self._num_units
@property
def output_size(self):
return self._num_units
@property
def state_multiplier(self):
return 1
def output_from_state(self, state):
"""Return the output portion of the state."""
return state
def __call__(self, inputs, state, scope=None):
"""Gated recurrent unit (GRU) function.
Args:
inputs: A 2D batch x input_dim tensor of inputs.
state: The previous state from the last time step.
scope (optional): TF variable scope for defined GRU variables.
Returns:
A tuple (state, state), where state is the newly computed state at time t.
It is returned twice to respect an interface that works for LSTMs.
"""
x = inputs
h = state
with tf.variable_scope(scope or type(self).__name__): # "GRU"
with tf.variable_scope("Gates"): # Reset gate and update gate.
# We start with bias of 1.0 to not reset and not update.
r_x = u_x = 0.0
if x is not None:
r_x, u_x = tf.split(axis=1, num_or_size_splits=2, value=linear(x,
2 * self._num_units,
alpha=self._input_weight_scale,
do_bias=False,
name="x_2_ru",
normalized=False,
collections=self._input_collections))
r_h, u_h = tf.split(axis=1, num_or_size_splits=2, value=linear(h,
2 * self._num_units,
do_bias=True,
alpha=self._rec_weight_scale,
name="h_2_ru",
collections=self._rec_collections))
r = r_x + r_h
u = u_x + u_h
r, u = tf.sigmoid(r), tf.sigmoid(u + self._forget_bias)
with tf.variable_scope("Candidate"):
c_x = 0.0
if x is not None:
c_x = linear(x, self._num_units, name="x_2_c", do_bias=False,
alpha=self._input_weight_scale,
normalized=False,
collections=self._input_collections)
c_rh = linear(r*h, self._num_units, name="rh_2_c", do_bias=True,
alpha=self._rec_weight_scale,
collections=self._rec_collections)
c = tf.tanh(c_x + c_rh)
new_h = u * h + (1 - u) * c
new_h = tf.clip_by_value(new_h, -self._clip_value, self._clip_value)
return new_h, new_h
class LFADS(object):
"""LFADS - Latent Factor Analysis via Dynamical Systems.
LFADS is an unsupervised method to decompose time series data into
various factors, such as an initial condition, a generative
dynamical system, inferred inputs to that generator, and a low
dimensional description of the observed data, called the factors.
  Additionally, the observations have a noise model (in this case
Poisson), so a denoised version of the observations is also created
(e.g. underlying rates of a Poisson distribution given the observed
event counts).
"""
def __init__(self, hps, kind="train", datasets=None):
"""Create an LFADS model.
train - a model for training, sampling of posteriors is used
posterior_sample_and_average - sample from the posterior, this is used
for evaluating the expected value of the outputs of LFADS, given a
specific input, by averaging over multiple samples from the approx
posterior. Also used for the lower bound on the negative
        log-likelihood using IWAE error (Importance Weighted Autoencoder).
This is the denoising operation.
prior_sample - a model for generation - sampling from priors is used
Args:
hps: The dictionary of hyper parameters.
kind: the type of model to build (see above).
datasets: a dictionary of named data_dictionaries, see top of lfads.py
"""
print("Building graph...")
all_kinds = ['train', 'posterior_sample_and_average', 'prior_sample']
assert kind in all_kinds, 'Wrong kind'
if hps.feedback_factors_or_rates == "rates":
assert len(hps.dataset_names) == 1, \
"Multiple datasets not supported for rate feedback."
num_steps = hps.num_steps
ic_dim = hps.ic_dim
co_dim = hps.co_dim
ext_input_dim = hps.ext_input_dim
cell_class = GRU
gen_cell_class = GenGRU
def makelambda(v): # Used with tf.case
return lambda: v
# Define the data placeholder, and deal with all parts of the graph
# that are dataset dependent.
self.dataName = tf.placeholder(tf.string, shape=())
# The batch_size to be inferred from data, as normal.
# Additionally, the data_dim will be inferred as well, allowing for a
# single placeholder for all datasets, regardless of data dimension.
if hps.output_dist == 'poisson':
# Enforce correct dtype
assert np.issubdtype(
datasets[hps.dataset_names[0]]['train_data'].dtype, int), \
"Data dtype must be int for poisson output distribution"
data_dtype = tf.int32
elif hps.output_dist == 'gaussian':
assert np.issubdtype(
datasets[hps.dataset_names[0]]['train_data'].dtype, float), \
"Data dtype must be float for gaussian output dsitribution"
data_dtype = tf.float32
else:
assert False, "NIY"
self.dataset_ph = dataset_ph = tf.placeholder(data_dtype,
[None, num_steps, None],
name="data")
self.train_step = tf.get_variable("global_step", [], tf.int64,
tf.zeros_initializer(),
trainable=False)
self.hps = hps
ndatasets = hps.ndatasets
factors_dim = hps.factors_dim
self.preds = preds = [None] * ndatasets
self.fns_in_fac_Ws = fns_in_fac_Ws = [None] * ndatasets
    self.fns_in_fac_bs = fns_in_fac_bs = [None] * ndatasets
self.fns_out_fac_Ws = fns_out_fac_Ws = [None] * ndatasets
self.fns_out_fac_bs = fns_out_fac_bs = [None] * ndatasets
self.datasetNames = dataset_names = hps.dataset_names
self.ext_inputs = ext_inputs = None
if len(dataset_names) == 1: # single session
if 'alignment_matrix_cxf' in datasets[dataset_names[0]].keys():
used_in_factors_dim = factors_dim
in_identity_if_poss = False
else:
used_in_factors_dim = hps.dataset_dims[dataset_names[0]]
in_identity_if_poss = True
else: # multisession
used_in_factors_dim = factors_dim
in_identity_if_poss = False
for d, name in enumerate(dataset_names):
data_dim = hps.dataset_dims[name]
in_mat_cxf = None
in_bias_1xf = None
align_bias_1xc = None
if datasets and 'alignment_matrix_cxf' in datasets[name].keys():
dataset = datasets[name]
if hps.do_train_readin:
print("Initializing trainable readin matrix with alignment matrix" \
" provided for dataset:", name)
else:
print("Setting non-trainable readin matrix to alignment matrix" \
" provided for dataset:", name)
in_mat_cxf = dataset['alignment_matrix_cxf'].astype(np.float32)
if in_mat_cxf.shape != (data_dim, factors_dim):
raise ValueError("""Alignment matrix must have dimensions %d x %d
(data_dim x factors_dim), but currently has %d x %d."""%
(data_dim, factors_dim, in_mat_cxf.shape[0],
in_mat_cxf.shape[1]))
if datasets and 'alignment_bias_c' in datasets[name].keys():
dataset = datasets[name]
if hps.do_train_readin:
print("Initializing trainable readin bias with alignment bias " \
"provided for dataset:", name)
else:
print("Setting non-trainable readin bias to alignment bias " \
"provided for dataset:", name)
align_bias_c = dataset['alignment_bias_c'].astype(np.float32)
align_bias_1xc = np.expand_dims(align_bias_c, axis=0)
if align_bias_1xc.shape[1] != data_dim:
raise ValueError("""Alignment bias must have dimensions %d
(data_dim), but currently has %d."""%
(data_dim, in_mat_cxf.shape[0]))
if in_mat_cxf is not None and align_bias_1xc is not None:
# (data - alignment_bias) * W_in
# data * W_in - alignment_bias * W_in
# So b = -alignment_bias * W_in to accommodate PCA style offset.
in_bias_1xf = -np.dot(align_bias_1xc, in_mat_cxf)
if hps.do_train_readin:
# Add to the IO_transformations collection only if we want the readin
# to be learnable; that collection is what gets trained when
# do_train_io_only is set.
collections_readin=['IO_transformations']
else:
collections_readin=None
in_fac_lin = init_linear(data_dim, used_in_factors_dim,
do_bias=True,
mat_init_value=in_mat_cxf,
bias_init_value=in_bias_1xf,
identity_if_possible=in_identity_if_poss,
normalized=False, name="x_2_infac_"+name,
collections=collections_readin,
trainable=hps.do_train_readin)
in_fac_W, in_fac_b = in_fac_lin
fns_in_fac_Ws[d] = makelambda(in_fac_W)
fns_in_fac_bs[d] = makelambda(in_fac_b)
with tf.variable_scope("glm"):
out_identity_if_poss = False
if len(dataset_names) == 1 and \
factors_dim == hps.dataset_dims[dataset_names[0]]:
out_identity_if_poss = True
for d, name in enumerate(dataset_names):
data_dim = hps.dataset_dims[name]
in_mat_cxf = None
align_bias_1xc = None  # reset per dataset so a stale bias cannot leak between iterations
if datasets and 'alignment_matrix_cxf' in datasets[name].keys():
dataset = datasets[name]
in_mat_cxf = dataset['alignment_matrix_cxf'].astype(np.float32)
if datasets and 'alignment_bias_c' in datasets[name].keys():
dataset = datasets[name]
align_bias_c = dataset['alignment_bias_c'].astype(np.float32)
align_bias_1xc = np.expand_dims(align_bias_c, axis=0)
out_mat_fxc = None
out_bias_1xc = None
if in_mat_cxf is not None:
out_mat_fxc = in_mat_cxf.T
if align_bias_1xc is not None:
out_bias_1xc = align_bias_1xc
if hps.output_dist == 'poisson':
out_fac_lin = init_linear(factors_dim, data_dim, do_bias=True,
mat_init_value=out_mat_fxc,
bias_init_value=out_bias_1xc,
identity_if_possible=out_identity_if_poss,
normalized=False,
name="fac_2_logrates_"+name,
collections=['IO_transformations'])
out_fac_W, out_fac_b = out_fac_lin
elif hps.output_dist == 'gaussian':
out_fac_lin_mean = \
init_linear(factors_dim, data_dim, do_bias=True,
mat_init_value=out_mat_fxc,
bias_init_value=out_bias_1xc,
normalized=False,
name="fac_2_means_"+name,
collections=['IO_transformations'])
out_fac_W_mean, out_fac_b_mean = out_fac_lin_mean
mat_init_value = np.zeros([factors_dim, data_dim]).astype(np.float32)
bias_init_value = np.ones([1, data_dim]).astype(np.float32)
out_fac_lin_logvar = \
init_linear(factors_dim, data_dim, do_bias=True,
mat_init_value=mat_init_value,
bias_init_value=bias_init_value,
normalized=False,
name="fac_2_logvars_"+name,
collections=['IO_transformations'])
out_fac_W_logvar, out_fac_b_logvar = out_fac_lin_logvar
out_fac_W = tf.concat(
axis=1, values=[out_fac_W_mean, out_fac_W_logvar])
out_fac_b = tf.concat(
axis=1, values=[out_fac_b_mean, out_fac_b_logvar])
else:
assert False, "NIY"
preds[d] = tf.equal(tf.constant(name), self.dataName)
data_dim = hps.dataset_dims[name]
fns_out_fac_Ws[d] = makelambda(out_fac_W)
fns_out_fac_bs[d] = makelambda(out_fac_b)
pf_pairs_in_fac_Ws = zip(preds, fns_in_fac_Ws)
pf_pairs_in_fac_bs = zip(preds, fns_in_fac_bs)
pf_pairs_out_fac_Ws = zip(preds, fns_out_fac_Ws)
pf_pairs_out_fac_bs = zip(preds, fns_out_fac_bs)
def _case_with_no_default(pairs):
def _default_value_fn():
with tf.control_dependencies([tf.Assert(False, ["Reached default"])]):
return tf.identity(pairs[0][1]())
return tf.case(pairs, _default_value_fn, exclusive=True)
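# The per-dataset selections below are resolved at run time by dataName:
# tf.case evaluates exactly the one branch whose predicate matches, e.g.
# (illustrative, names hypothetical)
#   session.run(this_in_fac_W, feed_dict={model.dataName: 'dataset_A'})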
this_in_fac_W = _case_with_no_default(pf_pairs_in_fac_Ws)
this_in_fac_b = _case_with_no_default(pf_pairs_in_fac_bs)
this_out_fac_W = _case_with_no_default(pf_pairs_out_fac_Ws)
this_out_fac_b = _case_with_no_default(pf_pairs_out_fac_bs)
# External inputs (not changing by dataset, by definition).
if hps.ext_input_dim > 0:
self.ext_input = tf.placeholder(tf.float32,
[None, num_steps, ext_input_dim],
name="ext_input")
else:
self.ext_input = None
ext_input_bxtxi = self.ext_input
self.keep_prob = keep_prob = tf.placeholder(tf.float32, [], "keep_prob")
self.batch_size = batch_size = int(hps.batch_size)
self.learning_rate = tf.Variable(float(hps.learning_rate_init),
trainable=False, name="learning_rate")
self.learning_rate_decay_op = self.learning_rate.assign(
self.learning_rate * hps.learning_rate_decay_factor)
# Dropout the data.
dataset_do_bxtxd = tf.nn.dropout(tf.to_float(dataset_ph), keep_prob)
if hps.ext_input_dim > 0:
ext_input_do_bxtxi = tf.nn.dropout(ext_input_bxtxi, keep_prob)
else:
ext_input_do_bxtxi = None
# ENCODERS
def encode_data(dataset_bxtxd, enc_cell, name, forward_or_reverse,
num_steps_to_encode):
"""Encode data for LFADS
Args:
dataset_bxtxd - the data to encode, as a 3 tensor, with dims
batch x time x data dims.
enc_cell: encoder cell
name: name of encoder
forward_or_reverse: string, encode in forward or reverse direction
num_steps_to_encode: number of steps to encode, 0:num_steps_to_encode
Returns:
encoded data as a list with num_steps_to_encode items, in order
"""
if forward_or_reverse == "forward":
dstr = "_fwd"
time_fwd_or_rev = range(num_steps_to_encode)
else:
dstr = "_rev"
time_fwd_or_rev = reversed(range(num_steps_to_encode))
with tf.variable_scope(name+"_enc"+dstr, reuse=False):
enc_state = tf.tile(
tf.Variable(tf.zeros([1, enc_cell.state_size]),
name=name+"_enc_t0"+dstr), tf.stack([batch_size, 1]))
enc_state.set_shape([None, enc_cell.state_size]) # tile loses shape
enc_outs = [None] * num_steps_to_encode
for i, t in enumerate(time_fwd_or_rev):
with tf.variable_scope(name+"_enc"+dstr, reuse=True if i > 0 else None):
dataset_t_bxd = dataset_bxtxd[:,t,:]
in_fac_t_bxf = tf.matmul(dataset_t_bxd, this_in_fac_W) + this_in_fac_b
in_fac_t_bxf.set_shape([None, used_in_factors_dim])
if ext_input_dim > 0 and not hps.inject_ext_input_to_gen:
ext_input_t_bxi = ext_input_do_bxtxi[:,t,:]
enc_input_t_bxfpe = tf.concat(
axis=1, values=[in_fac_t_bxf, ext_input_t_bxi])
else:
enc_input_t_bxfpe = in_fac_t_bxf
enc_out, enc_state = enc_cell(enc_input_t_bxfpe, enc_state)
enc_outs[t] = enc_out
return enc_outs
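# Note: each element of enc_outs has shape batch x encoder dim. The ic
# encoder below uses only the last forward / first reverse outputs, while
# the ci encoder feeds all time steps to the controller.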
# Encode initial condition means and variances
# ([x_T, x_T-1, ... x_0] and [x_0, x_1, ... x_T] -> g0/c0)
self.ic_enc_fwd = [None] * num_steps
self.ic_enc_rev = [None] * num_steps
if ic_dim > 0:
enc_ic_cell = cell_class(hps.ic_enc_dim,
weight_scale=hps.cell_weight_scale,
clip_value=hps.cell_clip_value)
ic_enc_fwd = encode_data(dataset_do_bxtxd, enc_ic_cell,
"ic", "forward",
hps.num_steps_for_gen_ic)
ic_enc_rev = encode_data(dataset_do_bxtxd, enc_ic_cell,
"ic", "reverse",
hps.num_steps_for_gen_ic)
self.ic_enc_fwd = ic_enc_fwd
self.ic_enc_rev = ic_enc_rev
# Encoder control input means and variances, bi-directional encoding so:
# ([x_T, x_T-1, ..., x_0] and [x_0, x_1 ... x_T] -> u_t)
self.ci_enc_fwd = [None] * num_steps
self.ci_enc_rev = [None] * num_steps
if co_dim > 0:
enc_ci_cell = cell_class(hps.ci_enc_dim,
weight_scale=hps.cell_weight_scale,
clip_value=hps.cell_clip_value)
ci_enc_fwd = encode_data(dataset_do_bxtxd, enc_ci_cell,
"ci", "forward",
hps.num_steps)
if hps.do_causal_controller:
ci_enc_rev = None
else:
ci_enc_rev = encode_data(dataset_do_bxtxd, enc_ci_cell,
"ci", "reverse",
hps.num_steps)
self.ci_enc_fwd = ci_enc_fwd
self.ci_enc_rev = ci_enc_rev
# STOCHASTIC LATENT VARIABLES, priors and posteriors
# (initial conditions g0, and control inputs, u_t)
# Note that zs represent all the stochastic latent variables.
with tf.variable_scope("z", reuse=False):
self.prior_zs_g0 = None
self.posterior_zs_g0 = None
self.g0s_val = None
if ic_dim > 0:
self.prior_zs_g0 = \
LearnableDiagonalGaussian(batch_size, ic_dim, name="prior_g0",
mean_init=0.0,
var_min=hps.ic_prior_var_min,
var_init=hps.ic_prior_var_scale,
var_max=hps.ic_prior_var_max)
ic_enc = tf.concat(axis=1, values=[ic_enc_fwd[-1], ic_enc_rev[0]])
ic_enc = tf.nn.dropout(ic_enc, keep_prob)
self.posterior_zs_g0 = \
DiagonalGaussianFromInput(ic_enc, ic_dim, "ic_enc_2_post_g0",
var_min=hps.ic_post_var_min)
if kind in ["train", "posterior_sample_and_average"]:
zs_g0 = self.posterior_zs_g0
else:
zs_g0 = self.prior_zs_g0
if kind in ["train", "posterior_sample_and_average", "prior_sample"]:
self.g0s_val = zs_g0.sample
else:
self.g0s_val = zs_g0.mean
# Priors for controller, 'co' for controller output
self.prior_zs_co = prior_zs_co = [None] * num_steps
self.posterior_zs_co = posterior_zs_co = [None] * num_steps
self.zs_co = zs_co = [None] * num_steps
self.prior_zs_ar_con = None
if co_dim > 0:
# Controller outputs
autocorrelation_taus = [hps.prior_ar_atau for x in range(hps.co_dim)]
noise_variances = [hps.prior_ar_nvar for x in range(hps.co_dim)]
self.prior_zs_ar_con = prior_zs_ar_con = \
LearnableAutoRegressive1Prior(batch_size, hps.co_dim,
autocorrelation_taus,
noise_variances,
hps.do_train_prior_ar_atau,
hps.do_train_prior_ar_nvar,
num_steps, "u_prior_ar1")
# CONTROLLER -> GENERATOR -> RATES
# (u(t) -> gen(t) -> factors(t) -> rates(t) -> p(x_t|z_t) )
self.controller_outputs = u_t = [None] * num_steps
self.con_ics = con_state = None
self.con_states = con_states = [None] * num_steps
self.con_outs = con_outs = [None] * num_steps
self.gen_inputs = gen_inputs = [None] * num_steps
if co_dim > 0:
# gen_cell_class here for l2 penalty recurrent weights
# didn't split the cell_weight scale here, because I doubt it matters
con_cell = gen_cell_class(hps.con_dim,
input_weight_scale=hps.cell_weight_scale,
rec_weight_scale=hps.cell_weight_scale,
clip_value=hps.cell_clip_value,
recurrent_collections=['l2_con_reg'])
with tf.variable_scope("con", reuse=False):
self.con_ics = tf.tile(
tf.Variable(tf.zeros([1, hps.con_dim*con_cell.state_multiplier]), \
name="c0"),
tf.stack([batch_size, 1]))
self.con_ics.set_shape([None, con_cell.state_size]) # tile loses shape
con_states[-1] = self.con_ics
gen_cell = gen_cell_class(hps.gen_dim,
input_weight_scale=hps.gen_cell_input_weight_scale,
rec_weight_scale=hps.gen_cell_rec_weight_scale,
clip_value=hps.cell_clip_value,
recurrent_collections=['l2_gen_reg'])
with tf.variable_scope("gen", reuse=False):
if ic_dim == 0:
self.gen_ics = tf.tile(
tf.Variable(tf.zeros([1, gen_cell.state_size]), name="g0"),
tf.stack([batch_size, 1]))
else:
self.gen_ics = linear(self.g0s_val, gen_cell.state_size,
identity_if_possible=True,
name="g0_2_gen_ic")
self.gen_states = gen_states = [None] * num_steps
self.gen_outs = gen_outs = [None] * num_steps
gen_states[-1] = self.gen_ics
gen_outs[-1] = gen_cell.output_from_state(gen_states[-1])
self.factors = factors = [None] * num_steps
factors[-1] = linear(gen_outs[-1], factors_dim, do_bias=False,
normalized=True, name="gen_2_fac")
self.rates = rates = [None] * num_steps
# rates[-1] is collected to potentially feed back to controller
with tf.variable_scope("glm", reuse=False):
if hps.output_dist == 'poisson':
log_rates_t0 = tf.matmul(factors[-1], this_out_fac_W) + this_out_fac_b
log_rates_t0.set_shape([None, None])
rates[-1] = tf.exp(log_rates_t0) # rate
rates[-1].set_shape([None, hps.dataset_dims[hps.dataset_names[0]]])
elif hps.output_dist == 'gaussian':
mean_n_logvars = tf.matmul(factors[-1],this_out_fac_W) + this_out_fac_b
mean_n_logvars.set_shape([None, None])
means_t_bxd, logvars_t_bxd = tf.split(axis=1, num_or_size_splits=2,
value=mean_n_logvars)
rates[-1] = means_t_bxd
else:
assert False, "NIY"
# We support multiple output distributions, for example Poisson, and also
# Gaussian. In these two cases respectively, there are one and two
# parameters (rates vs. mean and variance). So the output_dist_params
# tensor will have variable sizes via tf.concat and tf.split, along the 1st
# dimension. So in the case of gaussian, for example, it'll be
# batch x (D+D), where each D dims is the mean, and then variances,
# respectively. For a distribution with 3 parameters, it would be
# batch x (D+D+D).
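# (Illustrative) For a Gaussian output with D = 3, dist_params[t][:, 0:3]
# holds the means and dist_params[t][:, 3:6] the variances.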
self.output_dist_params = dist_params = [None] * num_steps
self.log_p_xgz_b = log_p_xgz_b = 0.0 # log P(x|z)
for t in range(num_steps):
# Controller
if co_dim > 0:
# Build inputs for controller
tlag = t - hps.controller_input_lag
if tlag < 0:
con_in_f_t = tf.zeros_like(ci_enc_fwd[0])
else:
con_in_f_t = ci_enc_fwd[tlag]
if hps.do_causal_controller:
# If controller is causal (wrt to data generation process), then it
# cannot see future data. Thus, excluding ci_enc_rev[t] is obvious.
# Less obvious is the need to exclude factors[t-1]. This arises
# because information flows from g0 through factors to the controller
# input. The g0 encoding is backwards, so we must necessarily exclude
# the factors in order to keep the controller input purely from a
# forward encoding (however unlikely it is that
# g0->factors->controller channel might actually be used in this way).
con_in_list_t = [con_in_f_t]
else:
tlag_rev = t + hps.controller_input_lag
if tlag_rev >= num_steps:
# better than zeros
con_in_r_t = tf.zeros_like(ci_enc_rev[0])
else:
con_in_r_t = ci_enc_rev[tlag_rev]
con_in_list_t = [con_in_f_t, con_in_r_t]
if hps.do_feed_factors_to_controller:
if hps.feedback_factors_or_rates == "factors":
con_in_list_t.append(factors[t-1])
elif hps.feedback_factors_or_rates == "rates":
con_in_list_t.append(rates[t-1])
else:
assert False, "NIY"
con_in_t = tf.concat(axis=1, values=con_in_list_t)
con_in_t = tf.nn.dropout(con_in_t, keep_prob)
with tf.variable_scope("con", reuse=True if t > 0 else None):
con_outs[t], con_states[t] = con_cell(con_in_t, con_states[t-1])
posterior_zs_co[t] = \
DiagonalGaussianFromInput(con_outs[t], co_dim,
name="con_to_post_co")
if kind == "train":
u_t[t] = posterior_zs_co[t].sample
elif kind == "posterior_sample_and_average":
u_t[t] = posterior_zs_co[t].sample
else:
u_t[t] = prior_zs_ar_con.samples_t[t]
# Inputs to the generator (controller output + external input)
if ext_input_dim > 0 and hps.inject_ext_input_to_gen:
ext_input_t_bxi = ext_input_do_bxtxi[:,t,:]
if co_dim > 0:
gen_inputs[t] = tf.concat(axis=1, values=[u_t[t], ext_input_t_bxi])
else:
gen_inputs[t] = ext_input_t_bxi
else:
gen_inputs[t] = u_t[t]
# Generator
data_t_bxd = dataset_ph[:,t,:]
with tf.variable_scope("gen", reuse=True if t > 0 else None):
gen_outs[t], gen_states[t] = gen_cell(gen_inputs[t], gen_states[t-1])
gen_outs[t] = tf.nn.dropout(gen_outs[t], keep_prob)
with tf.variable_scope("gen", reuse=True): # ic defined it above
factors[t] = linear(gen_outs[t], factors_dim, do_bias=False,
normalized=True, name="gen_2_fac")
with tf.variable_scope("glm", reuse=True if t > 0 else None):
if hps.output_dist == 'poisson':
log_rates_t = tf.matmul(factors[t], this_out_fac_W) + this_out_fac_b
log_rates_t.set_shape([None, None])
rates[t] = dist_params[t] = tf.exp(log_rates_t) # rates feed back
rates[t].set_shape([None, hps.dataset_dims[hps.dataset_names[0]]])
loglikelihood_t = Poisson(log_rates_t).logp(data_t_bxd)
elif hps.output_dist == 'gaussian':
mean_n_logvars = tf.matmul(factors[t],this_out_fac_W) + this_out_fac_b
mean_n_logvars.set_shape([None, None])
means_t_bxd, logvars_t_bxd = tf.split(axis=1, num_or_size_splits=2,
value=mean_n_logvars)
rates[t] = means_t_bxd # rates feed back to controller
dist_params[t] = tf.concat(
axis=1, values=[means_t_bxd, tf.exp(logvars_t_bxd)])
loglikelihood_t = \
diag_gaussian_log_likelihood(data_t_bxd,
means_t_bxd, logvars_t_bxd)
else:
assert False, "NIY"
log_p_xgz_b += tf.reduce_sum(loglikelihood_t, [1])
# Correlation of inferred inputs cost.
self.corr_cost = tf.constant(0.0)
if hps.co_mean_corr_scale > 0.0:
all_sum_corr = []
for i in range(hps.co_dim):
for j in range(i+1, hps.co_dim):
sum_corr_ij = tf.constant(0.0)
for t in range(num_steps):
u_mean_t = posterior_zs_co[t].mean
sum_corr_ij += u_mean_t[:,i]*u_mean_t[:,j]
all_sum_corr.append(0.5 * tf.square(sum_corr_ij))
self.corr_cost = tf.reduce_mean(all_sum_corr) # div by batch and by n*(n-1)/2 pairs
# Variational Lower Bound on posterior, p(z|x), plus reconstruction cost.
# KL and reconstruction costs are normalized only by batch size, not by
# dimension, or by time steps.
kl_cost_g0_b = tf.zeros_like(batch_size, dtype=tf.float32)
kl_cost_co_b = tf.zeros_like(batch_size, dtype=tf.float32)
self.kl_cost = tf.constant(0.0) # VAE KL cost
self.recon_cost = tf.constant(0.0) # VAE reconstruction cost
self.nll_bound_vae = tf.constant(0.0)
self.nll_bound_iwae = tf.constant(0.0) # for eval with IWAE cost.
if kind in ["train", "posterior_sample_and_average"]:
kl_cost_g0_b = 0.0
kl_cost_co_b = 0.0
if ic_dim > 0:
g0_priors = [self.prior_zs_g0]
g0_posts = [self.posterior_zs_g0]
kl_cost_g0_b = KLCost_GaussianGaussian(g0_posts, g0_priors).kl_cost_b
kl_cost_g0_b = hps.kl_ic_weight * kl_cost_g0_b
if co_dim > 0:
kl_cost_co_b = \
KLCost_GaussianGaussianProcessSampled(
posterior_zs_co, prior_zs_ar_con).kl_cost_b
kl_cost_co_b = hps.kl_co_weight * kl_cost_co_b
# L = -KL + log p(x|z), to maximize bound on likelihood
# -L = KL - log p(x|z), to minimize bound on NLL
# so 'reconstruction cost' is negative log likelihood
self.recon_cost = - tf.reduce_mean(log_p_xgz_b)
self.kl_cost = tf.reduce_mean(kl_cost_g0_b + kl_cost_co_b)
lb_on_ll_b = log_p_xgz_b - kl_cost_g0_b - kl_cost_co_b
# VAE error averages outside the log
self.nll_bound_vae = -tf.reduce_mean(lb_on_ll_b)
# IWAE error averages inside the log
k = tf.cast(tf.shape(log_p_xgz_b)[0], tf.float32)
iwae_lb_on_ll = -tf.log(k) + log_sum_exp(lb_on_ll_b)
self.nll_bound_iwae = -iwae_lb_on_ll
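# Note: log_sum_exp runs over the batch axis, so the IWAE bound is only
# meaningful when the batch holds k posterior samples of one example, as
# arranged in posterior_sample_and_average mode.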
# L2 regularization on the generator, normalized by number of parameters.
self.l2_cost = tf.constant(0.0)
if self.hps.l2_gen_scale > 0.0 or self.hps.l2_con_scale > 0.0:
l2_costs = []
l2_numels = []
l2_reg_var_lists = [tf.get_collection('l2_gen_reg'),
tf.get_collection('l2_con_reg')]
l2_reg_scales = [self.hps.l2_gen_scale, self.hps.l2_con_scale]
for l2_reg_vars, l2_scale in zip(l2_reg_var_lists, l2_reg_scales):
for v in l2_reg_vars:
numel = tf.reduce_prod(tf.concat(axis=0, values=tf.shape(v)))
numel_f = tf.cast(numel, tf.float32)
l2_numels.append(numel_f)
v_l2 = tf.reduce_sum(v*v)
l2_costs.append(0.5 * l2_scale * v_l2)
self.l2_cost = tf.add_n(l2_costs) / tf.add_n(l2_numels)
# Compute the cost for training, part of the graph regardless.
# The KL cost can be problematic at the beginning of optimization,
# so we allow a gradual, linearly ramped increase in weighting the KL
# from 0 to 1.
self.kl_decay_step = tf.maximum(self.train_step - hps.kl_start_step, 0)
self.l2_decay_step = tf.maximum(self.train_step - hps.l2_start_step, 0)
kl_decay_step_f = tf.cast(self.kl_decay_step, tf.float32)
l2_decay_step_f = tf.cast(self.l2_decay_step, tf.float32)
kl_increase_steps_f = tf.cast(hps.kl_increase_steps, tf.float32)
l2_increase_steps_f = tf.cast(hps.l2_increase_steps, tf.float32)
self.kl_weight = kl_weight = \
tf.minimum(kl_decay_step_f / kl_increase_steps_f, 1.0)
self.l2_weight = l2_weight = \
tf.minimum(l2_decay_step_f / l2_increase_steps_f, 1.0)
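# (Illustrative) Both schedules are linear ramps:
#   weight(t) = min(max(t - start_step, 0) / increase_steps, 1.0)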
self.timed_kl_cost = kl_weight * self.kl_cost
self.timed_l2_cost = l2_weight * self.l2_cost
self.weight_corr_cost = hps.co_mean_corr_scale * self.corr_cost
self.cost = self.recon_cost + self.timed_kl_cost + \
self.timed_l2_cost + self.weight_corr_cost
if kind != "train":
# save every so often
self.seso_saver = tf.train.Saver(tf.global_variables(),
max_to_keep=hps.max_ckpt_to_keep)
# lowest validation error
self.lve_saver = tf.train.Saver(tf.global_variables(),
max_to_keep=hps.max_ckpt_to_keep_lve)
return
# OPTIMIZATION
# train the io matrices only
if self.hps.do_train_io_only:
self.train_vars = tvars = \
tf.get_collection('IO_transformations',
scope=tf.get_variable_scope().name)
# train the encoder only
elif self.hps.do_train_encoder_only:
tvars1 = \
tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
scope='LFADS/ic_enc_*')
tvars2 = \
tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
scope='LFADS/z/ic_enc_*')
self.train_vars = tvars = tvars1 + tvars2
# train all variables
else:
self.train_vars = tvars = \
tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
scope=tf.get_variable_scope().name)
print("done.")
print("Model Variables (to be optimized): ")
total_params = 0
for i in range(len(tvars)):
shape = tvars[i].get_shape().as_list()
print(" ", i, tvars[i].name, shape)
total_params += np.prod(shape)
print("Total model parameters: ", total_params)
grads = tf.gradients(self.cost, tvars)
grads, grad_global_norm = tf.clip_by_global_norm(grads, hps.max_grad_norm)
opt = tf.train.AdamOptimizer(self.learning_rate, beta1=0.9, beta2=0.999,
epsilon=1e-01)
self.grads = grads
self.grad_global_norm = grad_global_norm
self.train_op = opt.apply_gradients(
zip(grads, tvars), global_step=self.train_step)
self.seso_saver = tf.train.Saver(tf.global_variables(),
max_to_keep=hps.max_ckpt_to_keep)
# lowest validation error
self.lve_saver = tf.train.Saver(tf.global_variables(),
max_to_keep=hps.max_ckpt_to_keep_lve)
# SUMMARIES, used only during training.
# example summary
self.example_image = tf.placeholder(tf.float32, shape=[1,None,None,3],
name='image_tensor')
self.example_summ = tf.summary.image("LFADS example", self.example_image,
collections=["example_summaries"])
# general training summaries
self.lr_summ = tf.summary.scalar("Learning rate", self.learning_rate)
self.kl_weight_summ = tf.summary.scalar("KL weight", self.kl_weight)
self.l2_weight_summ = tf.summary.scalar("L2 weight", self.l2_weight)
self.corr_cost_summ = tf.summary.scalar("Corr cost", self.weight_corr_cost)
self.grad_global_norm_summ = tf.summary.scalar("Gradient global norm",
self.grad_global_norm)
if hps.co_dim > 0:
self.atau_summ = [None] * hps.co_dim
self.pvar_summ = [None] * hps.co_dim
for c in range(hps.co_dim):
self.atau_summ[c] = \
tf.summary.scalar("AR Autocorrelation taus " + str(c),
tf.exp(self.prior_zs_ar_con.logataus_1xu[0,c]))
self.pvar_summ[c] = \
tf.summary.scalar("AR Variances " + str(c),
tf.exp(self.prior_zs_ar_con.logpvars_1xu[0,c]))
# cost summaries, separated into different collections for
# training vs validation. We make placeholders for these, because
# even though the graph computes these costs on a per-batch basis,
# we want to report the more reliable metric of per-epoch cost.
kl_cost_ph = tf.placeholder(tf.float32, shape=[], name='kl_cost_ph')
self.kl_t_cost_summ = tf.summary.scalar("KL cost (train)", kl_cost_ph,
collections=["train_summaries"])
self.kl_v_cost_summ = tf.summary.scalar("KL cost (valid)", kl_cost_ph,
collections=["valid_summaries"])
l2_cost_ph = tf.placeholder(tf.float32, shape=[], name='l2_cost_ph')
self.l2_cost_summ = tf.summary.scalar("L2 cost", l2_cost_ph,
collections=["train_summaries"])
recon_cost_ph = tf.placeholder(tf.float32, shape=[], name='recon_cost_ph')
self.recon_t_cost_summ = tf.summary.scalar("Reconstruction cost (train)",
recon_cost_ph,
collections=["train_summaries"])
self.recon_v_cost_summ = tf.summary.scalar("Reconstruction cost (valid)",
recon_cost_ph,
collections=["valid_summaries"])
total_cost_ph = tf.placeholder(tf.float32, shape=[], name='total_cost_ph')
self.cost_t_summ = tf.summary.scalar("Total cost (train)", total_cost_ph,
collections=["train_summaries"])
self.cost_v_summ = tf.summary.scalar("Total cost (valid)", total_cost_ph,
collections=["valid_summaries"])
self.kl_cost_ph = kl_cost_ph
self.l2_cost_ph = l2_cost_ph
self.recon_cost_ph = recon_cost_ph
self.total_cost_ph = total_cost_ph
# Merged summaries, for easy coding later.
self.merged_examples = tf.summary.merge_all(key="example_summaries")
self.merged_generic = tf.summary.merge_all() # default key is 'summaries'
self.merged_train = tf.summary.merge_all(key="train_summaries")
self.merged_valid = tf.summary.merge_all(key="valid_summaries")
session = tf.get_default_session()
self.logfile = os.path.join(hps.lfads_save_dir, "lfads_log")
self.writer = tf.summary.FileWriter(self.logfile)
def build_feed_dict(self, train_name, data_bxtxd, ext_input_bxtxi=None,
keep_prob=None):
"""Build the feed dictionary, handles cases where there is no value defined.
Args:
train_name: The key into the datasets, to set the tf.case statement for
the proper readin / readout matrices.
data_bxtxd: The data tensor
ext_input_bxtxi (optional): The external input tensor
keep_prob: The drop out keep probability.
Returns:
The feed dictionary with TF tensors as keys and data as values, for use
with tf.Session.run()
"""
feed_dict = {}
B, T, _ = data_bxtxd.shape
feed_dict[self.dataName] = train_name
feed_dict[self.dataset_ph] = data_bxtxd
if self.ext_input is not None and ext_input_bxtxi is not None:
feed_dict[self.ext_input] = ext_input_bxtxi
if keep_prob is None:
feed_dict[self.keep_prob] = self.hps.keep_prob
else:
feed_dict[self.keep_prob] = keep_prob
return feed_dict
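# Illustrative usage (hypothetical model instance, dataset name, shapes):
#   fd = model.build_feed_dict('dataset_A', np.zeros((8, hps.num_steps, 50)))
#   session.run(model.train_op, feed_dict=fd)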
@staticmethod
def get_batch(data_extxd, ext_input_extxi=None, batch_size=None,
example_idxs=None):
"""Get a batch of data, either randomly chosen, or specified directly.
Args:
data_extxd: The data to model, numpy tensors with shape:
# examples x # time steps x # dimensions
ext_input_extxi (optional): The external inputs, numpy tensor with shape:
# examples x # time steps x # external input dimensions
batch_size: The size of the batch to return
example_idxs (optional): The example indices used to select examples.
Returns:
A tuple with two parts:
1. Batched data numpy tensor with shape:
batch_size x # time steps x # dimensions
2. Batched external input numpy tensor with shape:
batch_size x # time steps x # external input dims
"""
assert batch_size is not None or example_idxs is not None, "Problems"
E, T, D = data_extxd.shape
if example_idxs is None:
example_idxs = np.random.choice(E, batch_size)
ext_input_bxtxi = None
if ext_input_extxi is not None:
ext_input_bxtxi = ext_input_extxi[example_idxs,:,:]
return data_extxd[example_idxs,:,:], ext_input_bxtxi
@staticmethod
def example_idxs_mod_batch_size(nexamples, batch_size):
"""Given a number of examples, E, and a batch_size, B, generate indices
[0, 1, 2, ... B-1;
[B, B+1, ... 2*B-1;
...
]
returning those indices as a 2-dim tensor shaped like E/B x B. Note that
shape is only correct if E % B == 0. If not, then an extra row is generated
so that the remainder of examples is included. The extra examples are
drawn randomly from the full set (see
randomize_example_idxs_mod_batch_size for fully randomized behavior).
Args:
nexamples: The number of examples to batch up.
batch_size: The size of the batch.
Returns:
2-dim tensor as described above.
"""
bmrem = batch_size - (nexamples % batch_size)
bmrem_examples = []
if bmrem < batch_size:
#bmrem_examples = np.zeros(bmrem, dtype=np.int32)
ridxs = np.random.permutation(nexamples)[0:bmrem].astype(np.int32)
bmrem_examples = np.sort(ridxs)
example_idxs = list(range(nexamples)) + list(bmrem_examples)
example_idxs_e_x_edivb = np.reshape(example_idxs, [-1, batch_size])
return example_idxs_e_x_edivb, bmrem
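# Worked example: nexamples=10, batch_size=4 -> bmrem=2; two extra indices
# are drawn at random and the 12 indices reshape to a 3 x 4 array.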
@staticmethod
def randomize_example_idxs_mod_batch_size(nexamples, batch_size):
"""Indices 1:nexamples, randomized, in 2D form of
shape = (nexamples / batch_size) x batch_size. The remainder
is managed by drawing randomly from 1:nexamples.
Args:
nexamples: number of examples to randomize
batch_size: number of elements in batch
Returns:
The randomized, properly shaped indices.
"""
assert nexamples > batch_size, "Problems"
bmrem = batch_size - nexamples % batch_size
bmrem_examples = []
if bmrem < batch_size:
bmrem_examples = np.random.choice(range(nexamples),
size=bmrem, replace=False)
example_idxs = list(range(nexamples)) + list(bmrem_examples)
mixed_example_idxs = np.random.permutation(example_idxs)
example_idxs_e_x_edivb = np.reshape(mixed_example_idxs, [-1, batch_size])
return example_idxs_e_x_edivb, bmrem
def shuffle_spikes_in_time(self, data_bxtxd):
"""Shuffle the spikes in the temporal dimension. This is useful to
help the LFADS system avoid overfitting to individual spikes or fast
oscillations found in the data that are irrelevant to behavior. A
pure 'tabula rasa' approach would avoid this, but LFADS is sensitive
enough to pick up dynamics that you may not want.
Args:
data_bxtxd: numpy array of spike count data to be shuffled.
Returns:
S_bxtxd, a numpy array with the same dimensions and contents as
data_bxtxd, but shuffled appropriately.
"""
B, T, N = data_bxtxd.shape
w = self.hps.temporal_spike_jitter_width
if w == 0:
return data_bxtxd
max_counts = np.max(data_bxtxd)
S_bxtxd = np.zeros([B,T,N])
# Intuitively, shuffle spike occurrences, 0 or 1, but since we have counts,
# do it over and over again up to the max count.
for mc in range(1,max_counts+1):
idxs = np.nonzero(data_bxtxd >= mc)
data_ones = np.zeros_like(data_bxtxd)
data_ones[data_bxtxd >= mc] = 1
nfound = len(idxs[0])
shuffles_incrs_in_time = np.random.randint(-w, w, size=nfound)
shuffle_tidxs = idxs[1].copy()
shuffle_tidxs += shuffles_incrs_in_time
# Reflect on the boundaries to not lose mass.
shuffle_tidxs[shuffle_tidxs < 0] = -shuffle_tidxs[shuffle_tidxs < 0]
shuffle_tidxs[shuffle_tidxs > T-1] = \
(T-1)-(shuffle_tidxs[shuffle_tidxs > T-1] -(T-1))
for iii in zip(idxs[0], shuffle_tidxs, idxs[2]):
S_bxtxd[iii] += 1
return S_bxtxd
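# Note: reflecting at the boundaries keeps the total spike count unchanged;
# every spike stays within [0, T-1], only its time bin moves.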
def shuffle_and_flatten_datasets(self, datasets, kind='train'):
"""Since LFADS supports multiple datasets in the same dynamical model,
we have to be careful to use all the data in a single training epoch. But
since the datasets may have different data dimensionality, we cannot batch
examples from data dictionaries together. Instead, we generate random
batches within each data dictionary, and then randomize these batches
while holding onto the dataname, so that when it's time to feed
the graph, the correct in/out matrices can be selected, per batch.
Args:
datasets: A dict of data dicts. The dataset dict is simply a
name(string)-> data dictionary mapping (See top of lfads.py).
kind: 'train' or 'valid'
Returns:
A flat list, in which each element is a pair ('name', indices).
"""
batch_size = self.hps.batch_size
ndatasets = len(datasets)
random_example_idxs = {}
epoch_idxs = {}
all_name_example_idx_pairs = []
kind_data = kind + '_data'
for name, data_dict in datasets.items():
nexamples, ntime, data_dim = data_dict[kind_data].shape
epoch_idxs[name] = 0
random_example_idxs, _ = \
self.randomize_example_idxs_mod_batch_size(nexamples, batch_size)
epoch_size = random_example_idxs.shape[0]
names = [name] * epoch_size
all_name_example_idx_pairs += zip(names, random_example_idxs)
np.random.shuffle(all_name_example_idx_pairs) # shuffle in place
return all_name_example_idx_pairs
def train_epoch(self, datasets, batch_size=None, do_save_ckpt=True):
"""Train the model through the entire dataset once.
Args:
datasets: A dict of data dicts. The dataset dict is simply a
name(string)-> data dictionary mapping (See top of lfads.py).
batch_size (optional): The batch_size to use
do_save_ckpt (optional): Should the routine save a checkpoint on this
training epoch?
Returns:
A tuple with 6 float values:
(total cost of the epoch, epoch reconstruction cost,
epoch kl cost, KL weight used this training epoch,
total l2 cost on generator, and the corresponding weight).
"""
ops_to_eval = [self.cost, self.recon_cost,
self.kl_cost, self.kl_weight,
self.l2_cost, self.l2_weight,
self.train_op]
collected_op_values = self.run_epoch(datasets, ops_to_eval, kind="train")
total_cost = total_recon_cost = total_kl_cost = 0.0
# normalizing by batch done in distributions.py
epoch_size = len(collected_op_values)
for op_values in collected_op_values:
total_cost += op_values[0]
total_recon_cost += op_values[1]
total_kl_cost += op_values[2]
kl_weight = collected_op_values[-1][3]
l2_cost = collected_op_values[-1][4]
l2_weight = collected_op_values[-1][5]
epoch_total_cost = total_cost / epoch_size
epoch_recon_cost = total_recon_cost / epoch_size
epoch_kl_cost = total_kl_cost / epoch_size
if do_save_ckpt:
session = tf.get_default_session()
checkpoint_path = os.path.join(self.hps.lfads_save_dir,
self.hps.checkpoint_name + '.ckpt')
self.seso_saver.save(session, checkpoint_path,
global_step=self.train_step)
return epoch_total_cost, epoch_recon_cost, epoch_kl_cost, \
kl_weight, l2_cost, l2_weight
def run_epoch(self, datasets, ops_to_eval, kind="train", batch_size=None,
do_collect=True, keep_prob=None):
"""Run the model through the entire dataset once.
Args:
datasets: A dict of data dicts. The dataset dict is simply a
name(string)-> data dictionary mapping (See top of lfads.py).
ops_to_eval: A list of tensorflow operations that will be evaluated in
the tf.session.run() call.
batch_size (optional): The batch_size to use
do_collect (optional): Should the routine collect all session.run
output as a list, and return it?
keep_prob (optional): The dropout keep probability.
Returns:
A list of lists, the internal list is the return for the ops for each
session.run() call. The outer list collects over the epoch.
"""
hps = self.hps
all_name_example_idx_pairs = \
self.shuffle_and_flatten_datasets(datasets, kind)
kind_data = kind + '_data'
kind_ext_input = kind + '_ext_input'
total_cost = total_recon_cost = total_kl_cost = 0.0
session = tf.get_default_session()
epoch_size = len(all_name_example_idx_pairs)
evaled_ops_list = []
for name, example_idxs in all_name_example_idx_pairs:
data_dict = datasets[name]
data_extxd = data_dict[kind_data]
if hps.output_dist == 'poisson' and hps.temporal_spike_jitter_width > 0:
data_extxd = self.shuffle_spikes_in_time(data_extxd)
ext_input_extxi = data_dict[kind_ext_input]
data_bxtxd, ext_input_bxtxi = self.get_batch(data_extxd, ext_input_extxi,
example_idxs=example_idxs)
feed_dict = self.build_feed_dict(name, data_bxtxd, ext_input_bxtxi,
keep_prob=keep_prob)
evaled_ops_np = session.run(ops_to_eval, feed_dict=feed_dict)
if do_collect:
evaled_ops_list.append(evaled_ops_np)
return evaled_ops_list
def summarize_all(self, datasets, summary_values):
"""Plot and summarize stuff in tensorboard.
Note that everything done in the current function is otherwise done on
a single, randomly selected dataset (except for summary_values, which are
passed in.)
Args:
datasets, the dictionary of datasets used in the study.
summary_values: These summary values are created from the training loop,
and so summarize the entire set of datasets.
"""
hps = self.hps
tr_kl_cost = summary_values['tr_kl_cost']
tr_recon_cost = summary_values['tr_recon_cost']
tr_total_cost = summary_values['tr_total_cost']
kl_weight = summary_values['kl_weight']
l2_weight = summary_values['l2_weight']
l2_cost = summary_values['l2_cost']
has_any_valid_set = summary_values['has_any_valid_set']
i = summary_values['nepochs']
session = tf.get_default_session()
train_summ, train_step = session.run([self.merged_train,
self.train_step],
feed_dict={self.l2_cost_ph:l2_cost,
self.kl_cost_ph:tr_kl_cost,
self.recon_cost_ph:tr_recon_cost,
self.total_cost_ph:tr_total_cost})
self.writer.add_summary(train_summ, train_step)
if has_any_valid_set:
ev_kl_cost = summary_values['ev_kl_cost']
ev_recon_cost = summary_values['ev_recon_cost']
ev_total_cost = summary_values['ev_total_cost']
eval_summ = session.run(self.merged_valid,
feed_dict={self.kl_cost_ph:ev_kl_cost,
self.recon_cost_ph:ev_recon_cost,
self.total_cost_ph:ev_total_cost})
self.writer.add_summary(eval_summ, train_step)
print("Epoch:%d, step:%d (TRAIN, VALID): total: %.2f, %.2f\
recon: %.2f, %.2f, kl: %.2f, %.2f, l2: %.5f,\
kl weight: %.2f, l2 weight: %.2f" % \
(i, train_step, tr_total_cost, ev_total_cost,
tr_recon_cost, ev_recon_cost, tr_kl_cost, ev_kl_cost,
l2_cost, kl_weight, l2_weight))
csv_outstr = "epoch,%d, step,%d, total,%.2f,%.2f, \
recon,%.2f,%.2f, kl,%.2f,%.2f, l2,%.5f, \
klweight,%.2f, l2weight,%.2f\n"% \
(i, train_step, tr_total_cost, ev_total_cost,
tr_recon_cost, ev_recon_cost, tr_kl_cost, ev_kl_cost,
l2_cost, kl_weight, l2_weight)
else:
print("Epoch:%d, step:%d TRAIN: total: %.2f recon: %.2f, kl: %.2f,\
l2: %.5f, kl weight: %.2f, l2 weight: %.2f" % \
(i, train_step, tr_total_cost, tr_recon_cost, tr_kl_cost,
l2_cost, kl_weight, l2_weight))
csv_outstr = "epoch,%d, step,%d, total,%.2f, recon,%.2f, kl,%.2f, \
l2,%.5f, klweight,%.2f, l2weight,%.2f\n"% \
(i, train_step, tr_total_cost, tr_recon_cost,
tr_kl_cost, l2_cost, kl_weight, l2_weight)
if self.hps.csv_log:
csv_file = os.path.join(self.hps.lfads_save_dir, self.hps.csv_log+'.csv')
with open(csv_file, "a") as myfile:
myfile.write(csv_outstr)
def plot_single_example(self, datasets):
"""Plot an image relating to a randomly chosen, specific example. We use
posterior sample and average by taking one example, and filling a whole
batch with that example, sample from the posterior, and then average the
quantities.
"""
hps = self.hps
all_data_names = list(datasets.keys())
data_name = np.random.permutation(all_data_names)[0]
data_dict = datasets[data_name]
has_valid_set = data_dict['valid_data'] is not None
cf = 1.0 # conversion factor used by the plotting code
# posterior sample and average here
E, _, _ = data_dict['train_data'].shape
eidx = np.random.choice(E)
example_idxs = eidx * np.ones(hps.batch_size, dtype=np.int32)
train_data_bxtxd, train_ext_input_bxtxi = \
self.get_batch(data_dict['train_data'], data_dict['train_ext_input'],
example_idxs=example_idxs)
truth_train_data_bxtxd = None
if 'train_truth' in data_dict and data_dict['train_truth'] is not None:
truth_train_data_bxtxd, _ = self.get_batch(data_dict['train_truth'],
example_idxs=example_idxs)
cf = data_dict['conversion_factor']
# plotter does averaging
train_model_values = self.eval_model_runs_batch(data_name,
train_data_bxtxd,
train_ext_input_bxtxi,
do_average_batch=False)
train_step = train_model_values['train_steps']
feed_dict = self.build_feed_dict(data_name, train_data_bxtxd,
train_ext_input_bxtxi, keep_prob=1.0)
session = tf.get_default_session()
generic_summ = session.run(self.merged_generic, feed_dict=feed_dict)
self.writer.add_summary(generic_summ, train_step)
valid_data_bxtxd = valid_model_values = valid_ext_input_bxtxi = None
truth_valid_data_bxtxd = None
if has_valid_set:
E, _, _ = data_dict['valid_data'].shape
eidx = np.random.choice(E)
example_idxs = eidx * np.ones(hps.batch_size, dtype=np.int32)
valid_data_bxtxd, valid_ext_input_bxtxi = \
self.get_batch(data_dict['valid_data'],
data_dict['valid_ext_input'],
example_idxs=example_idxs)
if 'valid_truth' in data_dict and data_dict['valid_truth'] is not None:
truth_valid_data_bxtxd, _ = self.get_batch(data_dict['valid_truth'],
example_idxs=example_idxs)
else:
truth_valid_data_bxtxd = None
# plotter does averaging
valid_model_values = self.eval_model_runs_batch(data_name,
valid_data_bxtxd,
valid_ext_input_bxtxi,
do_average_batch=False)
example_image = plot_lfads(train_bxtxd=train_data_bxtxd,
train_model_vals=train_model_values,
train_ext_input_bxtxi=train_ext_input_bxtxi,
train_truth_bxtxd=truth_train_data_bxtxd,
valid_bxtxd=valid_data_bxtxd,
valid_model_vals=valid_model_values,
valid_ext_input_bxtxi=valid_ext_input_bxtxi,
valid_truth_bxtxd=truth_valid_data_bxtxd,
bidx=None, cf=cf, output_dist=hps.output_dist)
example_image = np.expand_dims(example_image, axis=0)
example_summ = session.run(self.merged_examples,
feed_dict={self.example_image : example_image})
self.writer.add_summary(example_summ)
def train_model(self, datasets):
"""Train the model, print per-epoch information, and save checkpoints.
Loop over training epochs. The function that actually does the
training is train_epoch. This function iterates over the training
data, one epoch at a time. The learning rate schedule is such
that it will stay the same until the cost goes up in comparison to
the last few values, then it will drop.
Args:
datasets: A dict of data dicts. The dataset dict is simply a
name(string)-> data dictionary mapping (See top of lfads.py).
"""
hps = self.hps
has_any_valid_set = False
for data_dict in datasets.values():
if data_dict['valid_data'] is not None:
has_any_valid_set = True
break
session = tf.get_default_session()
lr = session.run(self.learning_rate)
lr_stop = hps.learning_rate_stop
i = -1
train_costs = []
valid_costs = []
ev_total_cost = ev_recon_cost = ev_kl_cost = 0.0
lowest_ev_cost = np.Inf
while True:
i += 1
do_save_ckpt = (i % 10 == 0)
tr_total_cost, tr_recon_cost, tr_kl_cost, kl_weight, l2_cost, l2_weight = \
self.train_epoch(datasets, do_save_ckpt=do_save_ckpt)
# Evaluate the validation cost, and potentially save. Note that this
# routine will not save a validation checkpoint until the kl weight and
# l2 weights are equal to 1.0.
if has_any_valid_set:
ev_total_cost, ev_recon_cost, ev_kl_cost = \
self.eval_cost_epoch(datasets, kind='valid')
valid_costs.append(ev_total_cost)
# > 1 may give more consistent results, but not the actual lowest vae.
# == 1 gives the lowest vae seen so far.
n_lve = 1
run_avg_lve = np.mean(valid_costs[-n_lve:])
# conditions for saving checkpoints:
# KL weight must have finished stepping (>=1.0), AND
# L2 weight must have finished stepping OR L2 is not being used, AND
# the current run has a lower LVE than previous runs AND
# len(valid_costs) > n_lve, so the running average over the last
# n_lve costs is well-defined.
if kl_weight >= 1.0 and \
(l2_weight >= 1.0 or \
(self.hps.l2_gen_scale == 0.0 and self.hps.l2_con_scale == 0.0)) \
and (len(valid_costs) > n_lve and run_avg_lve < lowest_ev_cost):
lowest_ev_cost = run_avg_lve
checkpoint_path = os.path.join(self.hps.lfads_save_dir,
self.hps.checkpoint_name + '_lve.ckpt')
self.lve_saver.save(session, checkpoint_path,
global_step=self.train_step,
latest_filename='checkpoint_lve')
# Plot and summarize.
values = {'nepochs':i, 'has_any_valid_set': has_any_valid_set,
'tr_total_cost':tr_total_cost, 'ev_total_cost':ev_total_cost,
'tr_recon_cost':tr_recon_cost, 'ev_recon_cost':ev_recon_cost,
'tr_kl_cost':tr_kl_cost, 'ev_kl_cost':ev_kl_cost,
'l2_weight':l2_weight, 'kl_weight':kl_weight,
'l2_cost':l2_cost}
self.summarize_all(datasets, values)
self.plot_single_example(datasets)
# Manage learning rate.
train_res = tr_total_cost
n_lr = hps.learning_rate_n_to_compare
if len(train_costs) > n_lr and train_res > np.max(train_costs[-n_lr:]):
_ = session.run(self.learning_rate_decay_op)
lr = session.run(self.learning_rate)
print(" Decreasing learning rate to %f." % lr)
# Force the system to run n_lr times while at this lr.
train_costs.append(np.inf)
else:
train_costs.append(train_res)
if lr < lr_stop:
print("Stopping optimization based on learning rate criteria.")
break
def eval_cost_epoch(self, datasets, kind='train', ext_input_extxi=None,
batch_size=None):
"""Evaluate the cost of the epoch.
Args:
datasets: A dict of data dicts (training and validation) used for
training and evaluation of the model, respectively.
Returns:
a 3 tuple of costs:
(epoch total cost, epoch reconstruction cost, epoch KL cost)
"""
ops_to_eval = [self.cost, self.recon_cost, self.kl_cost]
collected_op_values = self.run_epoch(datasets, ops_to_eval, kind=kind,
keep_prob=1.0)
total_cost = total_recon_cost = total_kl_cost = 0.0
# normalizing by batch done in distributions.py
epoch_size = len(collected_op_values)
for op_values in collected_op_values:
total_cost += op_values[0]
total_recon_cost += op_values[1]
total_kl_cost += op_values[2]
epoch_total_cost = total_cost / epoch_size
epoch_recon_cost = total_recon_cost / epoch_size
epoch_kl_cost = total_kl_cost / epoch_size
return epoch_total_cost, epoch_recon_cost, epoch_kl_cost
def eval_model_runs_batch(self, data_name, data_bxtxd, ext_input_bxtxi=None,
do_eval_cost=False, do_average_batch=False):
"""Returns all the goodies for the entire model, per batch.
Args:
data_name: The name of the data dict, to select which in/out matrices
to use.
data_bxtxd: Numpy array training data with shape:
batch_size x # time steps x # dimensions
ext_input_bxtxi: Numpy array training external input with shape:
batch_size x # time steps x # external input dims
do_eval_cost (optional): If true, evaluate the IWAE (Importance Weighted
Autoencoder) log likelihood bound, instead of the VAE version.
do_average_batch (optional): average over the batch, useful for getting
good IWAE costs, and model outputs for a single data point.
Returns:
A dictionary with the outputs of the model decoder, namely:
prior g0 mean, prior g0 variance, approx. posterior mean, approx.
posterior variance, the generator initial conditions, the control inputs (if
enabled), the state of the generator, the factors, and the rates.
"""
session = tf.get_default_session()
feed_dict = self.build_feed_dict(data_name, data_bxtxd,
ext_input_bxtxi, keep_prob=1.0)
# Non-temporal signals will be batch x dim.
# Temporal signals are list length T with elements batch x dim.
tf_vals = [self.gen_ics, self.gen_states, self.factors,
self.output_dist_params]
tf_vals.append(self.cost)
tf_vals.append(self.nll_bound_vae)
tf_vals.append(self.nll_bound_iwae)
tf_vals.append(self.train_step) # not train_op!
if self.hps.ic_dim > 0:
tf_vals += [self.prior_zs_g0.mean, self.prior_zs_g0.logvar,
self.posterior_zs_g0.mean, self.posterior_zs_g0.logvar]
if self.hps.co_dim > 0:
tf_vals.append(self.controller_outputs)
tf_vals_flat, fidxs = flatten(tf_vals)
np_vals_flat = session.run(tf_vals_flat, feed_dict=feed_dict)
ff = 0
gen_ics = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
gen_states = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
factors = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
out_dist_params = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
costs = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
nll_bound_vaes = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
nll_bound_iwaes = [np_vals_flat[f] for f in fidxs[ff]]; ff +=1
train_steps = [np_vals_flat[f] for f in fidxs[ff]]; ff +=1
if self.hps.ic_dim > 0:
prior_g0_mean = [np_vals_flat[f] for f in fidxs[ff]]; ff +=1
prior_g0_logvar = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
post_g0_mean = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
post_g0_logvar = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
if self.hps.co_dim > 0:
controller_outputs = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
# [0] are to take out the non-temporal items from lists
gen_ics = gen_ics[0]
costs = costs[0]
nll_bound_vaes = nll_bound_vaes[0]
nll_bound_iwaes = nll_bound_iwaes[0]
train_steps = train_steps[0]
# Convert to full tensors, not lists of tensors in time dim.
gen_states = list_t_bxn_to_tensor_bxtxn(gen_states)
factors = list_t_bxn_to_tensor_bxtxn(factors)
out_dist_params = list_t_bxn_to_tensor_bxtxn(out_dist_params)
if self.hps.ic_dim > 0:
prior_g0_mean = prior_g0_mean[0]
prior_g0_logvar = prior_g0_logvar[0]
post_g0_mean = post_g0_mean[0]
post_g0_logvar = post_g0_logvar[0]
if self.hps.co_dim > 0:
controller_outputs = list_t_bxn_to_tensor_bxtxn(controller_outputs)
if do_average_batch:
gen_ics = np.mean(gen_ics, axis=0)
gen_states = np.mean(gen_states, axis=0)
factors = np.mean(factors, axis=0)
out_dist_params = np.mean(out_dist_params, axis=0)
if self.hps.ic_dim > 0:
prior_g0_mean = np.mean(prior_g0_mean, axis=0)
prior_g0_logvar = np.mean(prior_g0_logvar, axis=0)
post_g0_mean = np.mean(post_g0_mean, axis=0)
post_g0_logvar = np.mean(post_g0_logvar, axis=0)
if self.hps.co_dim > 0:
controller_outputs = np.mean(controller_outputs, axis=0)
model_vals = {}
model_vals['gen_ics'] = gen_ics
model_vals['gen_states'] = gen_states
model_vals['factors'] = factors
model_vals['output_dist_params'] = out_dist_params
model_vals['costs'] = costs
model_vals['nll_bound_vaes'] = nll_bound_vaes
model_vals['nll_bound_iwaes'] = nll_bound_iwaes
model_vals['train_steps'] = train_steps
if self.hps.ic_dim > 0:
model_vals['prior_g0_mean'] = prior_g0_mean
model_vals['prior_g0_logvar'] = prior_g0_logvar
model_vals['post_g0_mean'] = post_g0_mean
model_vals['post_g0_logvar'] = post_g0_logvar
if self.hps.co_dim > 0:
model_vals['controller_outputs'] = controller_outputs
return model_vals
def eval_model_runs_avg_epoch(self, data_name, data_extxd,
ext_input_extxi=None):
"""Returns all the expected value for goodies for the entire model.
The expected value is taken over hidden (z) variables, namely the initial
conditions and the control inputs. The expected value is approximate, and
accomplished via sampling (batch_size) samples for every examples.
Args:
data_name: The name of the data dict, to select which in/out matrices
to use.
data_extxd: Numpy array training data with shape:
# examples x # time steps x # dimensions
ext_input_extxi (optional): Numpy array training external input with
shape: # examples x # time steps x # external input dims
Returns:
A dictionary with the averaged outputs of the model decoder, namely:
prior g0 mean, prior g0 variance, approx. posterior mean, approx.
posterior variance, the generator initial conditions, the control inputs (if
enabled), the state of the generator, the factors, and the output
distribution parameters, e.g. (rates or mean and variances).
"""
hps = self.hps
batch_size = hps.batch_size
E, T, D = data_extxd.shape
E_to_process = hps.ps_nexamples_to_process
if E_to_process > E:
print("Setting number of posterior samples to process to : ", E)
E_to_process = E
if hps.ic_dim > 0:
prior_g0_mean = np.zeros([E_to_process, hps.ic_dim])
prior_g0_logvar = np.zeros([E_to_process, hps.ic_dim])
post_g0_mean = np.zeros([E_to_process, hps.ic_dim])
post_g0_logvar = np.zeros([E_to_process, hps.ic_dim])
if hps.co_dim > 0:
controller_outputs = np.zeros([E_to_process, T, hps.co_dim])
gen_ics = np.zeros([E_to_process, hps.gen_dim])
gen_states = np.zeros([E_to_process, T, hps.gen_dim])
factors = np.zeros([E_to_process, T, hps.factors_dim])
if hps.output_dist == 'poisson':
out_dist_params = np.zeros([E_to_process, T, D])
elif hps.output_dist == 'gaussian':
out_dist_params = np.zeros([E_to_process, T, D+D])
else:
assert False, "NIY"
costs = np.zeros(E_to_process)
nll_bound_vaes = np.zeros(E_to_process)
nll_bound_iwaes = np.zeros(E_to_process)
train_steps = np.zeros(E_to_process)
for es_idx in range(E_to_process):
print("Running %d of %d." % (es_idx+1, E_to_process))
example_idxs = es_idx * np.ones(batch_size, dtype=np.int32)
data_bxtxd, ext_input_bxtxi = self.get_batch(data_extxd,
ext_input_extxi,
batch_size=batch_size,
example_idxs=example_idxs)
model_values = self.eval_model_runs_batch(data_name, data_bxtxd,
ext_input_bxtxi,
do_eval_cost=True,
do_average_batch=True)
if self.hps.ic_dim > 0:
prior_g0_mean[es_idx,:] = model_values['prior_g0_mean']
prior_g0_logvar[es_idx,:] = model_values['prior_g0_logvar']
post_g0_mean[es_idx,:] = model_values['post_g0_mean']
post_g0_logvar[es_idx,:] = model_values['post_g0_logvar']
gen_ics[es_idx,:] = model_values['gen_ics']
if self.hps.co_dim > 0:
controller_outputs[es_idx,:,:] = model_values['controller_outputs']
gen_states[es_idx,:,:] = model_values['gen_states']
factors[es_idx,:,:] = model_values['factors']
out_dist_params[es_idx,:,:] = model_values['output_dist_params']
costs[es_idx] = model_values['costs']
nll_bound_vaes[es_idx] = model_values['nll_bound_vaes']
nll_bound_iwaes[es_idx] = model_values['nll_bound_iwaes']
train_steps[es_idx] = model_values['train_steps']
print('bound nll(vae): %.3f, bound nll(iwae): %.3f' \
% (nll_bound_vaes[es_idx], nll_bound_iwaes[es_idx]))
model_runs = {}
if self.hps.ic_dim > 0:
model_runs['prior_g0_mean'] = prior_g0_mean
model_runs['prior_g0_logvar'] = prior_g0_logvar
model_runs['post_g0_mean'] = post_g0_mean
model_runs['post_g0_logvar'] = post_g0_logvar
model_runs['gen_ics'] = gen_ics
if self.hps.co_dim > 0:
model_runs['controller_outputs'] = controller_outputs
model_runs['gen_states'] = gen_states
model_runs['factors'] = factors
model_runs['output_dist_params'] = out_dist_params
model_runs['costs'] = costs
model_runs['nll_bound_vaes'] = nll_bound_vaes
model_runs['nll_bound_iwaes'] = nll_bound_iwaes
model_runs['train_steps'] = train_steps
return model_runs
def write_model_runs(self, datasets, output_fname=None):
"""Run the model on the data in data_dict, and save the computed values.
LFADS generates a number of outputs for each example, and these are all
saved. They are:
The mean and variance of the prior of g0.
The mean and variance of approximate posterior of g0.
The control inputs (if enabled)
The initial conditions, g0, for all examples.
The generator states for all time.
The factors for all time.
The output distribution parameters (e.g. rates) for all time.
Args:
datasets: a dictionary of named data_dictionaries, see top of lfads.py
output_fname: a file name stem for the output files.
"""
hps = self.hps
kind = hps.kind
for data_name, data_dict in datasets.items():
data_tuple = [('train', data_dict['train_data'],
data_dict['train_ext_input']),
('valid', data_dict['valid_data'],
data_dict['valid_ext_input'])]
for data_kind, data_extxd, ext_input_extxi in data_tuple:
if not output_fname:
fname = "model_runs_" + data_name + '_' + data_kind + '_' + kind
else:
fname = output_fname + data_name + '_' + data_kind + '_' + kind
print("Writing data for %s data and kind %s." % (data_name, data_kind))
model_runs = self.eval_model_runs_avg_epoch(data_name, data_extxd,
ext_input_extxi)
full_fname = os.path.join(hps.lfads_save_dir, fname)
write_data(full_fname, model_runs, compression='gzip')
print("Done.")
def write_model_samples(self, dataset_name, output_fname=None):
"""Use the prior distribution to generate batch_size number of samples
from the model.
LFADS generates a number of outputs for each sample, and these are all
saved. They are:
The mean and variance of the prior of g0.
The control inputs (if enabled)
The initial conditions, g0, for all examples.
The generator states for all time.
The factors for all time.
The output distribution parameters (e.g. rates) for all time.
Args:
dataset_name: The name of the dataset to grab the factors -> rates
alignment matrices from.
output_fname: The name of the file in which to save the generated
samples.
"""
hps = self.hps
batch_size = hps.batch_size
print("Generating %d samples" % (batch_size))
tf_vals = [self.factors, self.gen_states, self.gen_ics,
self.cost, self.output_dist_params]
if hps.ic_dim > 0:
tf_vals += [self.prior_zs_g0.mean, self.prior_zs_g0.logvar]
if hps.co_dim > 0:
tf_vals += [self.prior_zs_ar_con.samples_t]
tf_vals_flat, fidxs = flatten(tf_vals)
session = tf.get_default_session()
feed_dict = {}
feed_dict[self.dataName] = dataset_name
feed_dict[self.keep_prob] = 1.0
np_vals_flat = session.run(tf_vals_flat, feed_dict=feed_dict)
ff = 0
factors = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
gen_states = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
gen_ics = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
costs = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
output_dist_params = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
if hps.ic_dim > 0:
prior_g0_mean = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
prior_g0_logvar = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
if hps.co_dim > 0:
prior_zs_ar_con = [np_vals_flat[f] for f in fidxs[ff]]; ff += 1
# [0] are to take out the non-temporal items from lists
gen_ics = gen_ics[0]
costs = costs[0]
# Convert to full tensors, not lists of tensors in time dim.
gen_states = list_t_bxn_to_tensor_bxtxn(gen_states)
factors = list_t_bxn_to_tensor_bxtxn(factors)
output_dist_params = list_t_bxn_to_tensor_bxtxn(output_dist_params)
if hps.ic_dim > 0:
prior_g0_mean = prior_g0_mean[0]
prior_g0_logvar = prior_g0_logvar[0]
if hps.co_dim > 0:
prior_zs_ar_con = list_t_bxn_to_tensor_bxtxn(prior_zs_ar_con)
model_vals = {}
model_vals['gen_ics'] = gen_ics
model_vals['gen_states'] = gen_states
model_vals['factors'] = factors
model_vals['output_dist_params'] = output_dist_params
model_vals['costs'] = costs.reshape(1)
if hps.ic_dim > 0:
model_vals['prior_g0_mean'] = prior_g0_mean
model_vals['prior_g0_logvar'] = prior_g0_logvar
if hps.co_dim > 0:
model_vals['prior_zs_ar_con'] = prior_zs_ar_con
full_fname = os.path.join(hps.lfads_save_dir, output_fname)
write_data(full_fname, model_vals, compression='gzip')
print("Done.")
@staticmethod
def eval_model_parameters(use_nested=True, include_strs=None):
"""Evaluate and return all of the TF variables in the model.
Args:
use_nested (optional): For returning values, use a nested dictionary, based
on variable scoping, or return all variables in a flat dictionary.
include_strs (optional): A list of strings to use as a filter, to reduce the
number of variables returned. A variable name must contain at least one
string in include_strs as a sub-string in order to be returned.
Returns:
The parameters of the model. This can be in a flat
dictionary, or a nested dictionary, where the nesting is by variable
scope.
"""
all_tf_vars = tf.global_variables()
session = tf.get_default_session()
all_tf_vars_eval = session.run(all_tf_vars)
vars_dict = {}
strs = ["LFADS"]
if include_strs:
strs += include_strs
for i, (var, var_eval) in enumerate(zip(all_tf_vars, all_tf_vars_eval)):
if any(s in var.name for s in strs):
if not isinstance(var_eval, np.ndarray): # for H5PY
print(var.name, """ is not numpy array, saving as numpy array
with value: """, var_eval, type(var_eval))
e = np.array(var_eval)
print(e, type(e))
else:
e = var_eval
vars_dict[var.name] = e
if not use_nested:
return vars_dict
var_names = vars_dict.keys()
nested_vars_dict = {}
current_dict = nested_vars_dict
for v, var_name in enumerate(var_names):
var_split_name_list = var_name.split('/')
split_name_list_len = len(var_split_name_list)
current_dict = nested_vars_dict
for p, part in enumerate(var_split_name_list):
if p < split_name_list_len - 1:
if part in current_dict:
current_dict = current_dict[part]
else:
current_dict[part] = {}
current_dict = current_dict[part]
else:
current_dict[part] = vars_dict[var_name]
return nested_vars_dict
@staticmethod
def spikify_rates(rates_bxtxd):
"""Randomly spikify underlying rates according a Poisson distribution
Args:
rates_bxtxd: a numpy tensor of nonnegative rates with shape:
batch_size x # time steps x # dimensions
Returns:
A numpy array with the same shape as rates_bxtxd, but with the event
counts.
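
    Example (an illustrative sketch, not from the original code):
      rates = np.ones((2, 5, 3))     # B=2 trials, T=5 steps, N=3 units
      spikes = spikify_rates(rates)  # same shape, integer Poisson counts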
"""
B,T,N = rates_bxtxd.shape
    assert all([B > 0, N > 0]), "rates_bxtxd must have nonzero batch and unit dimensions"
    # Rates differ at every (batch, time, unit) entry, hence the nested loops.
spikes_bxtxd = np.zeros([B,T,N], dtype=np.int32)
for b in range(B):
for t in range(T):
for n in range(N):
rate = rates_bxtxd[b,t,n]
count = np.random.poisson(rate)
spikes_bxtxd[b,t,n] = count
return spikes_bxtxd
| apache-2.0 | 7,097,405,529,075,089,000 | 42.418825 | 89 | 0.59828 | false |
ali-abdullah/nlp | lda.py | 1 | 1146 | import gensim
import logging
import os.path
class LineCorpus(gensim.corpora.textcorpus.TextCorpus):
# Creates the corpus object that reads the document line by line
def get_texts(self):
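        # Stream one whitespace-tokenized document per line so the corpus
        # never needs to fit in memory; gensim consumes this lazily.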
with open(self.input) as f:
for l in f:
yield l.split()
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
if os.path.isfile('lda_model'):
    # Check whether the model has previously been created;
    # if it has, load it and print out the topics.
print("lda model was found")
model = gensim.models.LdaModel.load('lda_model')
print("number of topics : ")
print(model.num_topics)
    for i in range(model.num_topics):
print("topic number : ")
print(i)
print(model.print_topic(i))
doc = ['wonderful', 'action', 'packed', 'movie', 'steven', 'seagal', 'five', 'star']
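    # doc2bow maps the tokens to a sparse bag-of-words (a list of
    # (token_id, count) pairs); model[bow] then infers a list of
    # (topic_id, probability) pairs for this document.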
bow = model.id2word.doc2bow(doc)
topic_analysis = model[bow]
print(topic_analysis)
else:
corpus = LineCorpus('reviews.txt')
print("creating lda model")
model = gensim.models.LdaModel(corpus, id2word=corpus.dictionary, alpha='auto', num_topics=10, passes=5)
model.save('lda_model')
| mit | -7,397,097,240,201,630,000 | 32.705882 | 105 | 0.691972 | false |
jmwright/cadquery-freecad-module | Libs/pint/testsuite/parameterized.py | 1 | 5114 | # -*- coding: utf-8 -*-
#
# Adds Parameterized tests for Python's unittest module
#
# Code from: parameterizedtestcase, version: 0.1.0
# Homepage: https://github.com/msabramo/python_unittest_parameterized_test_case
# Author: Marc Abramowitz, email: marc@marc-abramowitz.com
# License: MIT
#
# Fixed for to work in Python 2 & 3 with "add_metaclass" decorator from six
# https://pypi.python.org/pypi/six
# Author: Benjamin Peterson
# License: MIT
#
# Use like this:
#
# from parameterizedtestcase import ParameterizedTestCase
#
# class MyTests(ParameterizedTestCase):
# @ParameterizedTestCase.parameterize(
# ("input", "expected_output"),
# [
# ("2+4", 6),
# ("3+5", 8),
# ("6*9", 54),
# ]
# )
# def test_eval(self, input, expected_output):
# self.assertEqual(eval(input), expected_output)
try:
import unittest2 as unittest
except ImportError: # pragma: no cover
import unittest
from functools import wraps
import collections
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
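        # Remove slot descriptors before recreating the class below;
        # copied descriptors would otherwise shadow the real __slots__.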
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
def augment_method_docstring(method, new_class_dict, classname,
param_names, param_values, new_method):
param_assignments_str = '; '.join(
['%s = %s' % (k, v) for (k, v) in zip(param_names, param_values)])
extra_doc = "%s (%s.%s) [with %s] " % (
method.__name__, new_class_dict.get('__module__', '<module>'),
classname, param_assignments_str)
try:
new_method.__doc__ = extra_doc + new_method.__doc__
except TypeError: # Catches when new_method.__doc__ is None
new_method.__doc__ = extra_doc
class ParameterizedTestCaseMetaClass(type):
method_counter = {}
def __new__(meta, classname, bases, class_dict):
new_class_dict = {}
for attr_name, attr_value in list(class_dict.items()):
if isinstance(attr_value, collections.Callable) and hasattr(attr_value, 'param_names'):
# print("Processing attr_name = %r; attr_value = %r" % (
# attr_name, attr_value))
method = attr_value
param_names = attr_value.param_names
data = attr_value.data
func_name_format = attr_value.func_name_format
meta.process_method(
classname, method, param_names, data, new_class_dict,
func_name_format)
else:
new_class_dict[attr_name] = attr_value
return type.__new__(meta, classname, bases, new_class_dict)
@classmethod
def process_method(
cls, classname, method, param_names, data, new_class_dict,
func_name_format):
method_counter = cls.method_counter
for param_values in data:
new_method = cls.new_method(method, param_values)
method_counter[method.__name__] = \
method_counter.get(method.__name__, 0) + 1
case_data = dict(list(zip(param_names, param_values)))
case_data['func_name'] = method.__name__
case_data['case_num'] = method_counter[method.__name__]
new_method.__name__ = func_name_format.format(**case_data)
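            # With the default format this yields names such as
            # 'test_eval_00001', 'test_eval_00002', ..., giving unittest a
            # distinct method per parameter tuple.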
augment_method_docstring(
method, new_class_dict, classname,
param_names, param_values, new_method)
new_class_dict[new_method.__name__] = new_method
@classmethod
def new_method(cls, method, param_values):
@wraps(method)
def new_method(self):
return method(self, *param_values)
return new_method
@add_metaclass(ParameterizedTestCaseMetaClass)
class ParameterizedTestMixin(object):
@classmethod
def parameterize(cls, param_names, data,
func_name_format='{func_name}_{case_num:05d}'):
"""Decorator for parameterizing a test method - example:
@ParameterizedTestCase.parameterize(
("isbn", "expected_title"), [
("0262033844", "Introduction to Algorithms"),
("0321558146", "Campbell Essential Biology")])
"""
def decorator(func):
@wraps(func)
def newfunc(*arg, **kwargs):
return func(*arg, **kwargs)
newfunc.param_names = param_names
newfunc.data = data
newfunc.func_name_format = func_name_format
return newfunc
return decorator
@add_metaclass(ParameterizedTestCaseMetaClass)
class ParameterizedTestCase(unittest.TestCase, ParameterizedTestMixin):
pass
| lgpl-3.0 | 1,117,024,097,544,180,100 | 32.644737 | 99 | 0.581736 | false |
lsaffre/djangosite | djangosite/management/commands/run.py | 1 | 1763 | # -*- coding: UTF-8 -*-
# Copyright 2012-2013 by Luc Saffre.
# License: BSD, see LICENSE for more details.
"""
.. management_command:: run
Execute a standalone Python script after having set up the Django
environment. Also modify `sys.args`, `__file__` and `__name__` so that
the invoked script sees them as if it had been called directly.
This is yet another answer to the frequently asked Django question
about how to run standalone Django scripts
(`[1] <http://stackoverflow.com/questions/4847469/use-django-from-python-manage-py-shell-to-python-script>`__,
`[2] <http://www.b-list.org/weblog/2007/sep/22/standalone-django-scripts/>`__).
It is almost the same as redirecting stdin of Django's ``shell`` command
(i.e. doing ``python manage.py shell < myscript.py``),
but with the possibility of using command line arguments
and without the disturbing messages from the interactive console.
For example if you have a file `myscript.py` with the following content...
::
from myapp.models import Partner
print Partner.objects.all()
... then you can run this script using::
$ python manage.py run myscript.py
[<Partner: Rumma & Ko OÜ>, ... <Partner: Charlier Ulrike>,
'...(remaining elements truncated)...']
"""
from __future__ import unicode_literals
import sys
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
help = __doc__
args = "scriptname [args ...]"
def handle(self, *args, **options):
if len(args) == 0:
raise CommandError("I need at least one argument.")
fn = args[0]
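        # Drop 'manage.py' and 'run' from sys.argv so the target script
        # sees its own name and arguments, as if invoked directly.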
sys.argv = sys.argv[2:]
globals()['__name__'] = '__main__'
globals()['__file__'] = fn
execfile(fn, globals())
| bsd-2-clause | -2,832,075,735,557,221,000 | 31.62963 | 110 | 0.674801 | false |
openwisp/netjsonconfig | tests/openwisp/test_backend.py | 1 | 8872 | import tarfile
import unittest
from copy import deepcopy
from hashlib import md5
from time import sleep
from netjsonconfig import OpenWisp
from netjsonconfig.exceptions import ValidationError
from netjsonconfig.utils import _TabsMixin
class TestBackend(unittest.TestCase, _TabsMixin):
"""
    Tests for the OpenWisp backend.
"""
config = {
"general": {"hostname": "openwisp-test"},
"interfaces": [
{"name": "tap0", "type": "virtual"},
{
"network": "serv",
"name": "br-serv",
"type": "bridge",
"bridge_members": ["tap0"],
"addresses": [
{
"proto": "static",
"family": "ipv4",
"address": "192.168.1.2",
"mask": 24,
}
],
},
{
"name": "wlan0",
"type": "wireless",
"wireless": {
"radio": "radio0",
"mode": "access_point",
"ssid": "wifi-service",
"isolate": True,
"network": ["wlan1", "serv"],
},
},
],
"radios": [
{
"name": "radio0",
"phy": "phy0",
"driver": "mac80211",
"protocol": "802.11n",
"channel": 11,
"channel_width": 20,
"tx_power": 5,
"country": "IT",
}
],
"openvpn": [
{
"ca": "/tmp/owispmanager/openvpn/x509/ca_1_service.pem",
"cert": "/tmp/owispmanager/openvpn/x509/l2vpn_client_2693.pem",
"cipher": "AES-128-CBC",
"comp_lzo": "yes",
"dev": "tap0",
"dev_type": "tap",
"down": "/tmp/owispmanager/openvpn/vpn_2693_script_down.sh",
"enabled": True,
"keepalive": "5 40",
"key": "/tmp/owispmanager/openvpn/x509/l2vpn_client_2693.pem",
"log": "/tmp/openvpn_2693.log",
"mode": "p2p",
"mute": 10,
"mute_replay_warnings": True,
"name": "2693",
"nobind": True,
"ns_cert_type": "server",
"persist_tun": True,
"proto": "tcp-client",
"remote": [{"host": "vpn.openwisp.org", "port": 12128}],
"script_security": 1,
"tls_client": True,
"up": "/tmp/owispmanager/openvpn/vpn_2693_script_up.sh",
"up_delay": 1,
"up_restart": True,
"verb": 1,
}
],
"tc_options": [
{"name": "tap0", "input_bandwidth": 2048, "output_bandwidth": 1024}
],
"files": [
{
"path": "/openvpn/x509/ca_1_service.pem",
"mode": "0600",
"contents": "-----BEGIN CERTIFICATE-----\ntest\n-----END CERTIFICATE-----\n", # noqa
},
{
"path": "/openvpn/x509/l2vpn_client_2693.pem",
"mode": "0600",
"contents": "-----BEGIN CERTIFICATE-----\ntest==\n-----END CERTIFICATE-----\n-----BEGIN RSA PRIVATE KEY-----\ntest\n-----END RSA PRIVATE KEY-----\n", # noqa
},
],
}
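    # Every test below renders a copy of this configuration with the
    # OpenWisp backend and inspects the generated tar archive (or the
    # rendered UCI output).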
def test_uci(self):
o = OpenWisp({"general": {"hostname": "openwisp-test"}})
tar = tarfile.open(fileobj=o.generate(), mode='r')
system = tar.getmember('uci/system.conf')
contents = tar.extractfile(system).read().decode()
expected = self._tabs(
"""package system
config 'system' 'system'
option 'hostname' 'openwisp-test'
"""
)
self.assertEqual(contents, expected)
tar.close()
def test_hostname_required(self):
o = OpenWisp({"general": {"timezone": "UTC"}})
with self.assertRaises(ValidationError):
o.validate()
def test_install_script(self):
config = deepcopy(self.config)
o = OpenWisp(config)
tar = tarfile.open(fileobj=o.generate(), mode='r')
install = tar.getmember('install.sh')
contents = tar.extractfile(install).read().decode()
self.assertIn('openvpn --mktun --dev 2693 --dev-type tap', contents)
self.assertIn('ifup br-serv', contents)
self.assertIn('$(ip address show dev br-serv | grep 192.168.1.2)', contents)
self.assertIn('wifi up radio0', contents)
self.assertNotIn('Starting Cron', contents)
        # ensure the script is executable
self.assertEqual(install.mode, 493)
tar.close()
def test_ensure_tun_vpn_ignored(self):
config = deepcopy(self.config)
config['openvpn'][0]['dev_type'] = 'tun'
o = OpenWisp(config)
tar = tarfile.open(fileobj=o.generate(), mode='r')
install = tar.getmember('install.sh')
contents = tar.extractfile(install).read().decode()
self.assertNotIn('openvpn --mktun --dev 2693 --dev-type tap', contents)
tar.close()
def test_uninstall_script(self):
config = deepcopy(self.config)
o = OpenWisp(config)
tar = tarfile.open(fileobj=o.generate(), mode='r')
uninstall = tar.getmember('uninstall.sh')
contents = tar.extractfile(uninstall).read().decode()
self.assertIn('openvpn --rmtun --dev 2693 --dev-type tap', contents)
self.assertNotIn('Stopping Cron', contents)
        # ensure the script is executable
self.assertEqual(uninstall.mode, 493)
tar.close()
def test_up_and_down_scripts(self):
config = deepcopy(self.config)
o = OpenWisp(config)
tar = tarfile.open(fileobj=o.generate(), mode='r')
up = tar.getmember('openvpn/vpn_2693_script_up.sh')
contents = tar.extractfile(up).read().decode()
self.assertIn('rm -f /tmp/will_reboot', contents)
        self.assertEqual(up.mode, 493)  # ensure the script is executable
down = tar.getmember('openvpn/vpn_2693_script_down.sh')
contents = tar.extractfile(down).read().decode()
self.assertIn('REBOOT_DELAY', contents)
        self.assertEqual(down.mode, 493)  # ensure the script is executable
tar.close()
def test_double_generation(self):
o = OpenWisp(self.config)
o.generate()
o.generate()
def test_wireless_radio_disabled_0(self):
o = OpenWisp({'radios': self.config['radios']})
output = o.render()
self.assertIn("option 'disabled' '0'", output)
def test_tc_script(self):
config = deepcopy(self.config)
o = OpenWisp(config)
tar = tarfile.open(fileobj=o.generate(), mode='r')
tc = tar.getmember('tc_script.sh')
contents = tar.extractfile(tc).read().decode()
self.assertIn('tc qdisc del dev tap0 root', contents)
self.assertIn('tc qdisc del dev tap0 ingress', contents)
self.assertIn('tc qdisc add dev tap0 root handle 1: htb default 2', contents)
self.assertIn(
'tc class add dev tap0 parent 1 classid 1:1 htb rate 1024kbit burst 191k',
contents,
)
self.assertIn(
'tc class add dev tap0 parent 1:1 classid 1:2 htb rate 512kbit ceil 1024kbit',
contents,
)
self.assertIn('tc qdisc add dev tap0 ingress', contents)
line = (
'tc filter add dev tap0 parent ffff: preference 0 u32 match u32 0x0 0x0 police '
'rate 2048kbit burst 383k drop flowid :1'
)
self.assertIn(line, contents)
tar.close()
def test_cron(self):
config = deepcopy(self.config)
config['files'] = [
{
"path": "/crontabs/root",
"mode": "0644",
"contents": "* * * * * echo 'test' > /tmp/test-cron",
}
]
o = OpenWisp(config)
tar = tarfile.open(fileobj=o.generate(), mode='r')
install = tar.getmember('install.sh')
contents = tar.extractfile(install).read().decode()
self.assertIn('Starting Cron', contents)
uninstall = tar.getmember('uninstall.sh')
contents = tar.extractfile(uninstall).read().decode()
self.assertIn('Stopping Cron', contents)
tar.close()
def test_checksum(self):
""" ensures checksum of same config doesn't change """
o = OpenWisp({"general": {"hostname": "test"}})
# md5 is good enough and won't slow down test execution too much
checksum1 = md5(o.generate().getvalue()).hexdigest()
sleep(1)
checksum2 = md5(o.generate().getvalue()).hexdigest()
self.assertEqual(checksum1, checksum2)
| gpl-3.0 | -8,514,143,548,844,402,000 | 36.277311 | 173 | 0.51454 | false |
andyzsf/django | tests/admin_views/tests.py | 1 | 240212 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import re
import datetime
import unittest
from django.conf import settings, global_settings
from django.core import mail
from django.core.checks import Error
from django.core.files import temp as tempfile
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import (NoReverseMatch,
get_script_prefix, reverse, set_script_prefix)
# Register auth models with the admin.
from django.contrib.auth import get_permission_codename
from django.contrib.admin import ModelAdmin
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.models import LogEntry, DELETION
from django.contrib.admin.options import TO_FIELD_VAR
from django.contrib.admin.templatetags.admin_static import static
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.admin.utils import quote
from django.contrib.admin.validation import ModelAdminValidator
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.models import Group, User, Permission
from django.contrib.contenttypes.models import ContentType
from django.contrib.staticfiles.storage import staticfiles_storage
from django.forms.utils import ErrorList
from django.template.response import TemplateResponse
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import patch_logger
from django.test import modify_settings, override_settings
from django.utils import formats
from django.utils import translation
from django.utils.cache import get_max_age
from django.utils.encoding import iri_to_uri, force_bytes, force_text
from django.utils.html import escape
from django.utils.http import urlencode
from django.utils.six.moves.urllib.parse import parse_qsl, urljoin, urlparse
from django.utils._os import upath
from django.utils import six
# local test models
from .models import (Article, BarAccount, CustomArticle, EmptyModel, FooAccount,
Gallery, ModelWithStringPrimaryKey, Person, Persona, Picture, Podcast,
Section, Subscriber, Vodcast, Language, Collector, Widget, Grommet,
DooHickey, FancyDoodad, Whatsit, Category, Post, Plot, FunkyTag, Chapter,
Book, Promo, WorkHour, Employee, Question, Answer, Inquisition, Actor,
FoodDelivery, RowLevelChangePermissionModel, Paper, CoverLetter, Story,
OtherStory, ComplexSortedPerson, PluggableSearchPerson, Parent, Child, AdminOrderedField,
AdminOrderedModelMethod, AdminOrderedAdminMethod, AdminOrderedCallable,
Report, MainPrepopulated, RelatedPrepopulated, UnorderedObject,
Simple, UndeletableObject, UnchangeableObject, Choice, ShortMessage,
Telegram, Pizza, Topping, FilteredManager, City, Restaurant, Worker,
ParentWithDependentChildren, Character, FieldOverridePost, Color2)
from .admin import site, site2, CityAdmin
ERROR_MESSAGE = "Please enter the correct username and password \
for a staff account. Note that both fields may be case-sensitive."
ADMIN_VIEW_TEMPLATES_DIR = settings.TEMPLATE_DIRS + (os.path.join(os.path.dirname(upath(__file__)), 'templates'),)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls",
USE_I18N=True, USE_L10N=False, LANGUAGE_CODE='en')
class AdminViewBasicTestCase(TestCase):
fixtures = ['admin-views-users.xml', 'admin-views-colors.xml',
'admin-views-fabrics.xml', 'admin-views-books.xml']
# Store the bit of the URL where the admin is registered as a class
# variable. That way we can test a second AdminSite just by subclassing
# this test case and changing urlbit.
urlbit = 'admin'
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
formats.reset_format_cache()
def assertContentBefore(self, response, text1, text2, failing_msg=None):
"""
Testing utility asserting that text1 appears before text2 in response
content.
"""
self.assertEqual(response.status_code, 200)
self.assertLess(response.content.index(force_bytes(text1)), response.content.index(force_bytes(text2)),
failing_msg)
class AdminViewBasicTest(AdminViewBasicTestCase):
def test_trailing_slash_required(self):
"""
        If you leave off the trailing slash, the app should redirect and add it.
"""
response = self.client.get('/test_admin/%s/admin_views/article/add' % self.urlbit)
self.assertRedirects(response,
'/test_admin/%s/admin_views/article/add/' % self.urlbit,
status_code=301)
def test_admin_static_template_tag(self):
"""
Test that admin_static.static is pointing to the collectstatic version
(as django.contrib.collectstatic is in installed apps).
"""
old_url = staticfiles_storage.base_url
staticfiles_storage.base_url = '/test/'
try:
self.assertEqual(static('path'), '/test/path')
finally:
staticfiles_storage.base_url = old_url
def test_basic_add_GET(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get('/test_admin/%s/admin_views/section/add/' % self.urlbit)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_add_with_GET_args(self):
response = self.client.get('/test_admin/%s/admin_views/section/add/' % self.urlbit, {'name': 'My Section'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'value="My Section"',
msg_prefix="Couldn't find an input with the right value in the response")
def test_basic_edit_GET(self):
"""
A smoke test to ensure GET on the change_view works.
"""
response = self.client.get('/test_admin/%s/admin_views/section/1/' % self.urlbit)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_basic_edit_GET_string_PK(self):
"""
Ensure GET on the change_view works (returns an HTTP 404 error, see
#11191) when passing a string as the PK argument for a model with an
integer PK field.
"""
response = self.client.get('/test_admin/%s/admin_views/section/abc/' % self.urlbit)
self.assertEqual(response.status_code, 404)
def test_basic_inheritance_GET_string_PK(self):
"""
Ensure GET on the change_view works on inherited models (returns an
HTTP 404 error, see #19951) when passing a string as the PK argument
for a model with an integer PK field.
"""
response = self.client.get('/test_admin/%s/admin_views/supervillain/abc/' % self.urlbit)
self.assertEqual(response.status_code, 404)
def test_basic_add_POST(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": "Another Section",
# inline data
"article_set-TOTAL_FORMS": "3",
"article_set-INITIAL_FORMS": "0",
"article_set-MAX_NUM_FORMS": "0",
}
response = self.client.post('/test_admin/%s/admin_views/section/add/' % self.urlbit, post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_popup_add_POST(self):
"""
Ensure http response from a popup is properly escaped.
"""
post_data = {
'_popup': '1',
'title': 'title with a new\nline',
'content': 'some content',
'date_0': '2010-09-10',
'date_1': '14:55:39',
}
response = self.client.post('/test_admin/%s/admin_views/article/add/' % self.urlbit, post_data)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'dismissAddAnotherPopup')
self.assertContains(response, 'title with a new\\u000Aline')
# Post data for edit inline
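    # The *-TOTAL_FORMS / *-INITIAL_FORMS / *-MAX_NUM_FORMS keys are the
    # inline formset's management form; Django rejects the POST without them.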
inline_post_data = {
"name": "Test section",
# inline data
"article_set-TOTAL_FORMS": "6",
"article_set-INITIAL_FORMS": "3",
"article_set-MAX_NUM_FORMS": "0",
"article_set-0-id": "1",
# there is no title in database, give one here or formset will fail.
"article_set-0-title": "Norske bostaver æøå skaper problemer",
"article_set-0-content": "<p>Middle content</p>",
"article_set-0-date_0": "2008-03-18",
"article_set-0-date_1": "11:54:58",
"article_set-0-section": "1",
"article_set-1-id": "2",
"article_set-1-title": "Need a title.",
"article_set-1-content": "<p>Oldest content</p>",
"article_set-1-date_0": "2000-03-18",
"article_set-1-date_1": "11:54:58",
"article_set-2-id": "3",
"article_set-2-title": "Need a title.",
"article_set-2-content": "<p>Newest content</p>",
"article_set-2-date_0": "2009-03-18",
"article_set-2-date_1": "11:54:58",
"article_set-3-id": "",
"article_set-3-title": "",
"article_set-3-content": "",
"article_set-3-date_0": "",
"article_set-3-date_1": "",
"article_set-4-id": "",
"article_set-4-title": "",
"article_set-4-content": "",
"article_set-4-date_0": "",
"article_set-4-date_1": "",
"article_set-5-id": "",
"article_set-5-title": "",
"article_set-5-content": "",
"article_set-5-date_0": "",
"article_set-5-date_1": "",
}
def test_basic_edit_POST(self):
"""
A smoke test to ensure POST on edit_view works.
"""
response = self.client.post('/test_admin/%s/admin_views/section/1/' % self.urlbit, self.inline_post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as(self):
"""
Test "save as".
"""
post_data = self.inline_post_data.copy()
post_data.update({
'_saveasnew': 'Save+as+new',
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-3-section": "1",
"article_set-4-section": "1",
"article_set-5-section": "1",
})
response = self.client.post('/test_admin/%s/admin_views/section/1/' % self.urlbit, post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_change_list_sorting_callable(self):
"""
Ensure we can sort on a list_display field that is a callable
(column 2 is callable_year in ArticleAdmin)
"""
response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit, {'o': 2})
self.assertContentBefore(response, 'Oldest content', 'Middle content',
"Results of sorting on callable are out of order.")
self.assertContentBefore(response, 'Middle content', 'Newest content',
"Results of sorting on callable are out of order.")
def test_change_list_sorting_model(self):
"""
Ensure we can sort on a list_display field that is a Model method
(column 3 is 'model_year' in ArticleAdmin)
"""
response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit, {'o': '-3'})
self.assertContentBefore(response, 'Newest content', 'Middle content',
"Results of sorting on Model method are out of order.")
self.assertContentBefore(response, 'Middle content', 'Oldest content',
"Results of sorting on Model method are out of order.")
def test_change_list_sorting_model_admin(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin method
(column 4 is 'modeladmin_year' in ArticleAdmin)
"""
response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit, {'o': '4'})
self.assertContentBefore(response, 'Oldest content', 'Middle content',
"Results of sorting on ModelAdmin method are out of order.")
self.assertContentBefore(response, 'Middle content', 'Newest content',
"Results of sorting on ModelAdmin method are out of order.")
def test_change_list_sorting_model_admin_reverse(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin
method in reverse order (i.e. admin_order_field uses the '-' prefix)
(column 6 is 'model_year_reverse' in ArticleAdmin)
"""
response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit, {'o': '6'})
self.assertContentBefore(response, '2009', '2008',
"Results of sorting on ModelAdmin method are out of order.")
self.assertContentBefore(response, '2008', '2000',
"Results of sorting on ModelAdmin method are out of order.")
# Let's make sure the ordering is right and that we don't get a
# FieldError when we change to descending order
response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit, {'o': '-6'})
self.assertContentBefore(response, '2000', '2008',
"Results of sorting on ModelAdmin method are out of order.")
self.assertContentBefore(response, '2008', '2009',
"Results of sorting on ModelAdmin method are out of order.")
def test_change_list_sorting_multiple(self):
p1 = Person.objects.create(name="Chris", gender=1, alive=True)
p2 = Person.objects.create(name="Chris", gender=2, alive=True)
p3 = Person.objects.create(name="Bob", gender=1, alive=True)
link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
# Sort by name, gender
# This hard-codes the URL because it'll fail if it runs against the
# 'admin2' custom admin (which doesn't have the Person model).
response = self.client.get('/test_admin/admin/admin_views/person/', {'o': '1.2'})
self.assertContentBefore(response, link3, link1)
self.assertContentBefore(response, link1, link2)
# Sort by gender descending, name
response = self.client.get('/test_admin/admin/admin_views/person/', {'o': '-2.1'})
self.assertContentBefore(response, link2, link3)
self.assertContentBefore(response, link3, link1)
def test_change_list_sorting_preserve_queryset_ordering(self):
"""
If no ordering is defined in `ModelAdmin.ordering` or in the query
string, then the underlying order of the queryset should not be
changed, even if it is defined in `Modeladmin.get_queryset()`.
Refs #11868, #7309.
"""
p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80)
p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70)
p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60)
link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
# This hard-codes the URL because it'll fail if it runs against the
# 'admin2' custom admin (which doesn't have the Person model).
response = self.client.get('/test_admin/admin/admin_views/person/', {})
self.assertContentBefore(response, link3, link2)
self.assertContentBefore(response, link2, link1)
def test_change_list_sorting_model_meta(self):
# Test ordering on Model Meta is respected
l1 = Language.objects.create(iso='ur', name='Urdu')
l2 = Language.objects.create(iso='ar', name='Arabic')
link1 = reverse('admin:admin_views_language_change', args=(quote(l1.pk),))
link2 = reverse('admin:admin_views_language_change', args=(quote(l2.pk),))
response = self.client.get('/test_admin/admin/admin_views/language/', {})
self.assertContentBefore(response, link2, link1)
# Test we can override with query string
response = self.client.get('/test_admin/admin/admin_views/language/', {'o': '-1'})
self.assertContentBefore(response, link1, link2)
def test_change_list_sorting_override_model_admin(self):
# Test ordering on Model Admin is respected, and overrides Model Meta
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse('admin:admin_views_podcast_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_podcast_change', args=(p2.pk,))
response = self.client.get('/test_admin/admin/admin_views/podcast/', {})
self.assertContentBefore(response, link1, link2)
def test_multiple_sort_same_field(self):
# Check that we get the columns we expect if we have two columns
# that correspond to the same ordering field
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse('admin:admin_views_podcast_change', args=(quote(p1.pk),))
link2 = reverse('admin:admin_views_podcast_change', args=(quote(p2.pk),))
response = self.client.get('/test_admin/admin/admin_views/podcast/', {})
self.assertContentBefore(response, link1, link2)
p1 = ComplexSortedPerson.objects.create(name="Bob", age=10)
p2 = ComplexSortedPerson.objects.create(name="Amy", age=20)
link1 = reverse('admin:admin_views_complexsortedperson_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_complexsortedperson_change', args=(p2.pk,))
response = self.client.get('/test_admin/admin/admin_views/complexsortedperson/', {})
# Should have 5 columns (including action checkbox col)
self.assertContains(response, '<th scope="col"', count=5)
self.assertContains(response, 'Name')
self.assertContains(response, 'Colored name')
# Check order
self.assertContentBefore(response, 'Name', 'Colored name')
# Check sorting - should be by name
self.assertContentBefore(response, link2, link1)
def test_sort_indicators_admin_order(self):
"""
Ensures that the admin shows default sort indicators for all
kinds of 'ordering' fields: field names, method on the model
admin and model itself, and other callables. See #17252.
"""
models = [(AdminOrderedField, 'adminorderedfield'),
(AdminOrderedModelMethod, 'adminorderedmodelmethod'),
(AdminOrderedAdminMethod, 'adminorderedadminmethod'),
(AdminOrderedCallable, 'adminorderedcallable')]
for model, url in models:
model.objects.create(stuff='The Last Item', order=3)
model.objects.create(stuff='The First Item', order=1)
model.objects.create(stuff='The Middle Item', order=2)
response = self.client.get('/test_admin/admin/admin_views/%s/' % url, {})
self.assertEqual(response.status_code, 200)
# Should have 3 columns including action checkbox col.
self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url)
# Check if the correct column was selected. 2 is the index of the
# 'order' column in the model admin's 'list_display' with 0 being
# the implicit 'action_checkbox' and 1 being the column 'stuff'.
self.assertEqual(response.context['cl'].get_ordering_field_columns(), {2: 'asc'})
# Check order of records.
self.assertContentBefore(response, 'The First Item', 'The Middle Item')
self.assertContentBefore(response, 'The Middle Item', 'The Last Item')
def test_limited_filter(self):
"""Ensure admin changelist filters do not contain objects excluded via limit_choices_to.
This also tests relation-spanning filters (e.g. 'color__value').
"""
response = self.client.get('/test_admin/%s/admin_views/thing/' % self.urlbit)
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<div id="changelist-filter">',
msg_prefix="Expected filter not found in changelist view")
self.assertNotContains(response, '<a href="?color__id__exact=3">Blue</a>',
msg_prefix="Changelist filter not correctly limited by limit_choices_to")
def test_relation_spanning_filters(self):
response = self.client.get('/test_admin/%s/admin_views/chapterxtra1/' %
self.urlbit)
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<div id="changelist-filter">')
filters = {
'chap__id__exact': dict(
values=[c.id for c in Chapter.objects.all()],
test=lambda obj, value: obj.chap.id == value),
'chap__title': dict(
values=[c.title for c in Chapter.objects.all()],
test=lambda obj, value: obj.chap.title == value),
'chap__book__id__exact': dict(
values=[b.id for b in Book.objects.all()],
test=lambda obj, value: obj.chap.book.id == value),
'chap__book__name': dict(
values=[b.name for b in Book.objects.all()],
test=lambda obj, value: obj.chap.book.name == value),
'chap__book__promo__id__exact': dict(
values=[p.id for p in Promo.objects.all()],
test=lambda obj, value: obj.chap.book.promo_set.filter(id=value).exists()),
'chap__book__promo__name': dict(
values=[p.name for p in Promo.objects.all()],
test=lambda obj, value: obj.chap.book.promo_set.filter(name=value).exists()),
}
for filter_path, params in filters.items():
for value in params['values']:
query_string = urlencode({filter_path: value})
# ensure filter link exists
self.assertContains(response, '<a href="?%s">' % query_string)
# ensure link works
filtered_response = self.client.get(
'/test_admin/%s/admin_views/chapterxtra1/?%s' % (
self.urlbit, query_string))
self.assertEqual(filtered_response.status_code, 200)
# ensure changelist contains only valid objects
for obj in filtered_response.context['cl'].queryset.all():
self.assertTrue(params['test'](obj, value))
def test_incorrect_lookup_parameters(self):
"""Ensure incorrect lookup parameters are handled gracefully."""
response = self.client.get('/test_admin/%s/admin_views/thing/' % self.urlbit, {'notarealfield': '5'})
self.assertRedirects(response, '/test_admin/%s/admin_views/thing/?e=1' % self.urlbit)
# Spanning relationships through a nonexistent related object (Refs #16716)
response = self.client.get('/test_admin/%s/admin_views/thing/' % self.urlbit, {'notarealfield__whatever': '5'})
self.assertRedirects(response, '/test_admin/%s/admin_views/thing/?e=1' % self.urlbit)
response = self.client.get('/test_admin/%s/admin_views/thing/' % self.urlbit, {'color__id__exact': 'StringNotInteger!'})
self.assertRedirects(response, '/test_admin/%s/admin_views/thing/?e=1' % self.urlbit)
# Regression test for #18530
response = self.client.get('/test_admin/%s/admin_views/thing/' % self.urlbit, {'pub_date__gte': 'foo'})
self.assertRedirects(response, '/test_admin/%s/admin_views/thing/?e=1' % self.urlbit)
def test_isnull_lookups(self):
"""Ensure is_null is handled correctly."""
Article.objects.create(title="I Could Go Anywhere", content="Versatile", date=datetime.datetime.now())
response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit)
self.assertContains(response, '4 articles')
response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit, {'section__isnull': 'false'})
self.assertContains(response, '3 articles')
response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit, {'section__isnull': '0'})
self.assertContains(response, '3 articles')
response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit, {'section__isnull': 'true'})
self.assertContains(response, '1 article')
response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit, {'section__isnull': '1'})
self.assertContains(response, '1 article')
def test_logout_and_password_change_URLs(self):
response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit)
self.assertContains(response, '<a href="/test_admin/%s/logout/">' % self.urlbit)
self.assertContains(response, '<a href="/test_admin/%s/password_change/">' % self.urlbit)
def test_named_group_field_choices_change_list(self):
"""
Ensures the admin changelist shows correct values in the relevant column
for rows corresponding to instances of a model in which a named group
has been used in the choices option of a field.
"""
link1 = reverse('admin:admin_views_fabric_change', args=(1,), current_app=self.urlbit)
link2 = reverse('admin:admin_views_fabric_change', args=(2,), current_app=self.urlbit)
response = self.client.get('/test_admin/%s/admin_views/fabric/' % self.urlbit)
fail_msg = "Changelist table isn't showing the right human-readable values set by a model field 'choices' option named group."
self.assertContains(response, '<a href="%s">Horizontal</a>' % link1, msg_prefix=fail_msg, html=True)
self.assertContains(response, '<a href="%s">Vertical</a>' % link2, msg_prefix=fail_msg, html=True)
def test_named_group_field_choices_filter(self):
"""
Ensures the filter UI shows correctly when at least one named group has
been used in the choices option of a model field.
"""
response = self.client.get('/test_admin/%s/admin_views/fabric/' % self.urlbit)
fail_msg = "Changelist filter isn't showing options contained inside a model field 'choices' option named group."
self.assertContains(response, '<div id="changelist-filter">')
self.assertContains(response,
'<a href="?surface__exact=x">Horizontal</a>', msg_prefix=fail_msg, html=True)
self.assertContains(response,
'<a href="?surface__exact=y">Vertical</a>', msg_prefix=fail_msg, html=True)
def test_change_list_null_boolean_display(self):
Post.objects.create(public=None)
        # This hard-codes the URL because it'll fail if it runs
# against the 'admin2' custom admin (which doesn't have the
# Post model).
response = self.client.get("/test_admin/admin/admin_views/post/")
self.assertContains(response, 'icon-unknown.gif')
def test_i18n_language_non_english_default(self):
"""
Check if the JavaScript i18n view returns an empty language catalog
if the default language is non-English but the selected language
is English. See #13388 and #3594 for more details.
"""
with self.settings(LANGUAGE_CODE='fr'), translation.override('en-us'):
response = self.client.get('/test_admin/admin/jsi18n/')
self.assertNotContains(response, 'Choisir une heure')
def test_i18n_language_non_english_fallback(self):
"""
Makes sure that the fallback language is still working properly
in cases where the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE='fr'), translation.override('none'):
response = self.client.get('/test_admin/admin/jsi18n/')
self.assertContains(response, 'Choisir une heure')
def test_L10N_deactivated(self):
"""
Check if L10N is deactivated, the JavaScript i18n view doesn't
return localized date/time formats. Refs #14824.
"""
with self.settings(LANGUAGE_CODE='ru', USE_L10N=False), translation.override('none'):
response = self.client.get('/test_admin/admin/jsi18n/')
self.assertNotContains(response, '%d.%m.%Y %H:%M:%S')
self.assertContains(response, '%Y-%m-%d %H:%M:%S')
def test_disallowed_filtering(self):
with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as calls:
response = self.client.get("/test_admin/admin/admin_views/album/?owner__email__startswith=fuzzy")
self.assertEqual(response.status_code, 400)
self.assertEqual(len(calls), 1)
# Filters are allowed if explicitly included in list_filter
response = self.client.get("/test_admin/admin/admin_views/thing/?color__value__startswith=red")
self.assertEqual(response.status_code, 200)
response = self.client.get("/test_admin/admin/admin_views/thing/?color__value=red")
self.assertEqual(response.status_code, 200)
# Filters should be allowed if they involve a local field without the
# need to whitelist them in list_filter or date_hierarchy.
response = self.client.get("/test_admin/admin/admin_views/person/?age__gt=30")
self.assertEqual(response.status_code, 200)
e1 = Employee.objects.create(name='Anonymous', gender=1, age=22, alive=True, code='123')
e2 = Employee.objects.create(name='Visitor', gender=2, age=19, alive=True, code='124')
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1)
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2)
response = self.client.get("/test_admin/admin/admin_views/workhour/")
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'employee__person_ptr__exact')
response = self.client.get("/test_admin/admin/admin_views/workhour/?employee__person_ptr__exact=%d" % e1.pk)
self.assertEqual(response.status_code, 200)
def test_disallowed_to_field(self):
with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
response = self.client.get("/test_admin/admin/admin_views/section/", {TO_FIELD_VAR: 'missing_field'})
self.assertEqual(response.status_code, 400)
self.assertEqual(len(calls), 1)
# Specifying a field that is not referred by any other model registered
# to this admin site should raise an exception.
with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
response = self.client.get("/test_admin/admin/admin_views/section/", {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 400)
self.assertEqual(len(calls), 1)
# #23839 - Primary key should always be allowed, even if the referenced model isn't registered.
response = self.client.get("/test_admin/admin/admin_views/notreferenced/", {TO_FIELD_VAR: 'id'})
self.assertEqual(response.status_code, 200)
# #23915 - Specifying a field referenced by another model though a m2m should be allowed.
response = self.client.get("/test_admin/admin/admin_views/recipe/", {TO_FIELD_VAR: 'rname'})
self.assertEqual(response.status_code, 200)
# #23604, #23915 - Specifying a field referenced through a reverse m2m relationship should be allowed.
response = self.client.get("/test_admin/admin/admin_views/ingredient/", {TO_FIELD_VAR: 'iname'})
self.assertEqual(response.status_code, 200)
# #23329 - Specifying a field that is not referred by any other model directly registered
# to this admin site but registered through inheritance should be allowed.
response = self.client.get("/test_admin/admin/admin_views/referencedbyparent/", {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 200)
# #23431 - Specifying a field that is only referred to by a inline of a registered
# model should be allowed.
response = self.client.get("/test_admin/admin/admin_views/referencedbyinline/", {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 200)
# We also want to prevent the add and change view from leaking a
# disallowed field value.
with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
response = self.client.post("/test_admin/admin/admin_views/section/add/", {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 400)
self.assertEqual(len(calls), 1)
section = Section.objects.create()
with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
response = self.client.post("/test_admin/admin/admin_views/section/%d/" % section.pk, {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 400)
self.assertEqual(len(calls), 1)
def test_allowed_filtering_15103(self):
"""
Regressions test for ticket 15103 - filtering on fields defined in a
ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields
can break.
"""
# Filters should be allowed if they are defined on a ForeignKey pointing to this model
response = self.client.get("/test_admin/admin/admin_views/inquisition/?leader__name=Palin&leader__age=27")
self.assertEqual(response.status_code, 200)
def test_popup_dismiss_related(self):
"""
Regression test for ticket 20664 - ensure the pk is properly quoted.
"""
actor = Actor.objects.create(name="Palin", age=27)
response = self.client.get("/test_admin/admin/admin_views/actor/?%s" % IS_POPUP_VAR)
self.assertContains(response, "opener.dismissRelatedLookupPopup(window, '%s')" % actor.pk)
def test_hide_change_password(self):
"""
Tests if the "change password" link in the admin is hidden if the User
does not have a usable password set.
(against 9bea85795705d015cdadc82c68b99196a8554f5c)
"""
user = User.objects.get(username='super')
user.set_unusable_password()
user.save()
response = self.client.get('/test_admin/admin/')
self.assertNotContains(response, reverse('admin:password_change'),
msg_prefix='The "change password" link should not be displayed if a user does not have a usable password.')
def test_change_view_with_show_delete_extra_context(self):
"""
Ensured that the 'show_delete' context variable in the admin's change
view actually controls the display of the delete button.
Refs #10057.
"""
instance = UndeletableObject.objects.create(name='foo')
response = self.client.get('/test_admin/%s/admin_views/undeletableobject/%d/' %
(self.urlbit, instance.pk))
self.assertNotContains(response, 'deletelink')
def test_allows_attributeerror_to_bubble_up(self):
"""
Ensure that AttributeErrors are allowed to bubble when raised inside
a change list view.
Requires a model to be created so there's something to be displayed
Refs: #16655, #18593, and #18747
"""
Simple.objects.create()
with self.assertRaises(AttributeError):
self.client.get('/test_admin/%s/admin_views/simple/' % self.urlbit)
def test_changelist_with_no_change_url(self):
"""
ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url
for change_view is removed from get_urls
Regression test for #20934
"""
UnchangeableObject.objects.create()
response = self.client.get('/test_admin/admin/admin_views/unchangeableobject/')
self.assertEqual(response.status_code, 200)
# Check the format of the shown object -- shouldn't contain a change link
self.assertContains(response, '<th class="field-__str__">UnchangeableObject object</th>', html=True)
def test_invalid_appindex_url(self):
"""
#21056 -- URL reversing shouldn't work for nonexistent apps.
"""
good_url = '/test_admin/admin/admin_views/'
confirm_good_url = reverse('admin:app_list',
kwargs={'app_label': 'admin_views'})
self.assertEqual(good_url, confirm_good_url)
with self.assertRaises(NoReverseMatch):
reverse('admin:app_list', kwargs={'app_label': 'this_should_fail'})
with self.assertRaises(NoReverseMatch):
reverse('admin:app_list', args=('admin_views2',))
def test_proxy_model_content_type_is_used_for_log_entries(self):
"""
Log entries for proxy models should have the proxy model's content
type.
Regression test for #21084.
"""
color2_content_type = ContentType.objects.get_for_model(Color2, for_concrete_model=False)
# add
color2_add_url = reverse('admin:admin_views_color2_add')
self.client.post(color2_add_url, {'value': 'orange'})
color2_addition_log = LogEntry.objects.all()[0]
self.assertEqual(color2_content_type, color2_addition_log.content_type)
# change
color_id = color2_addition_log.object_id
color2_change_url = reverse('admin:admin_views_color2_change', args=(color_id,))
self.client.post(color2_change_url, {'value': 'blue'})
color2_change_log = LogEntry.objects.all()[0]
self.assertEqual(color2_content_type, color2_change_log.content_type)
# delete
color2_delete_url = reverse('admin:admin_views_color2_delete', args=(color_id,))
self.client.post(color2_delete_url)
color2_delete_log = LogEntry.objects.all()[0]
self.assertEqual(color2_content_type, color2_delete_log.content_type)
def test_adminsite_display_site_url(self):
"""
#13749 - Admin should display link to front-end site 'View site'
"""
url = reverse('admin:index')
response = self.client.get(url)
self.assertEqual(response.context['site_url'], '/my-site-url/')
self.assertContains(response, '<a href="/my-site-url/">View site</a>')
@override_settings(TEMPLATE_DIRS=ADMIN_VIEW_TEMPLATES_DIR)
class AdminCustomTemplateTests(AdminViewBasicTestCase):
def test_extended_bodyclass_template_change_form(self):
"""
Ensure that the admin/change_form.html template uses block.super in the
bodyclass block.
"""
response = self.client.get('/test_admin/%s/admin_views/section/add/' % self.urlbit)
self.assertContains(response, 'bodyclass_consistency_check ')
def test_extended_bodyclass_template_change_password(self):
"""
Ensure that the auth/user/change_password.html template uses block
super in the bodyclass block.
"""
user = User.objects.get(username='super')
response = self.client.get('/test_admin/%s/auth/user/%s/password/' % (self.urlbit, user.id))
self.assertContains(response, 'bodyclass_consistency_check ')
def test_extended_bodyclass_template_index(self):
"""
Ensure that the admin/index.html template uses block.super in the
bodyclass block.
"""
response = self.client.get('/test_admin/%s/' % self.urlbit)
self.assertContains(response, 'bodyclass_consistency_check ')
def test_extended_bodyclass_change_list(self):
"""
Ensure that the admin/change_list.html' template uses block.super
in the bodyclass block.
"""
response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit)
self.assertContains(response, 'bodyclass_consistency_check ')
def test_extended_bodyclass_template_login(self):
"""
Ensure that the admin/login.html template uses block.super in the
bodyclass block.
"""
self.client.logout()
response = self.client.get('/test_admin/%s/login/' % self.urlbit)
self.assertContains(response, 'bodyclass_consistency_check ')
def test_extended_bodyclass_template_delete_confirmation(self):
"""
Ensure that the admin/delete_confirmation.html template uses
block.super in the bodyclass block.
"""
group = Group.objects.create(name="foogroup")
response = self.client.get('/test_admin/%s/auth/group/%s/delete/' % (self.urlbit, group.id))
self.assertContains(response, 'bodyclass_consistency_check ')
def test_extended_bodyclass_template_delete_selected_confirmation(self):
"""
Ensure that the admin/delete_selected_confirmation.html template uses
block.super in bodyclass block.
"""
group = Group.objects.create(name="foogroup")
post_data = {
'action': 'delete_selected',
'selected_across': '0',
'index': '0',
'_selected_action': group.id
}
response = self.client.post('/test_admin/%s/auth/group/' % (self.urlbit), post_data)
self.assertEqual(response.context['site_header'], 'Django administration')
self.assertContains(response, 'bodyclass_consistency_check ')
def test_filter_with_custom_template(self):
"""
Ensure that one can use a custom template to render an admin filter.
Refs #17515.
"""
response = self.client.get("/test_admin/admin/admin_views/color2/")
self.assertTemplateUsed(response, 'custom_filter_template.html')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminViewFormUrlTest(TestCase):
fixtures = ["admin-views-users.xml"]
urlbit = "admin3"
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_change_form_URL_has_correct_value(self):
"""
Tests whether change_view has form_url in response.context
"""
response = self.client.get('/test_admin/%s/admin_views/section/1/' % self.urlbit)
self.assertIn('form_url', response.context, msg='form_url not present in response.context')
self.assertEqual(response.context['form_url'], 'pony')
def test_initial_data_can_be_overridden(self):
"""
Tests that the behavior for setting initial
form data can be overridden in the ModelAdmin class.
Usually, the initial value is set via the GET params.
"""
response = self.client.get('/test_admin/%s/admin_views/restaurant/add/' % self.urlbit, {'name': 'test_value'})
# this would be the usual behaviour
self.assertNotContains(response, 'value="test_value"')
# this is the overridden behaviour
self.assertContains(response, 'value="overridden_value"')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminJavaScriptTest(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_js_minified_only_if_debug_is_false(self):
"""
Ensure that the minified versions of the JS files are only used when
DEBUG is False.
Refs #17521.
"""
with override_settings(DEBUG=False):
response = self.client.get(
'/test_admin/%s/admin_views/section/add/' % 'admin')
self.assertNotContains(response, 'jquery.js')
self.assertContains(response, 'jquery.min.js')
self.assertNotContains(response, 'prepopulate.js')
self.assertContains(response, 'prepopulate.min.js')
self.assertNotContains(response, 'actions.js')
self.assertContains(response, 'actions.min.js')
self.assertNotContains(response, 'collapse.js')
self.assertContains(response, 'collapse.min.js')
self.assertNotContains(response, 'inlines.js')
self.assertContains(response, 'inlines.min.js')
with override_settings(DEBUG=True):
response = self.client.get(
'/test_admin/%s/admin_views/section/add/' % 'admin')
self.assertContains(response, 'jquery.js')
self.assertNotContains(response, 'jquery.min.js')
self.assertContains(response, 'prepopulate.js')
self.assertNotContains(response, 'prepopulate.min.js')
self.assertContains(response, 'actions.js')
self.assertNotContains(response, 'actions.min.js')
self.assertContains(response, 'collapse.js')
self.assertNotContains(response, 'collapse.min.js')
self.assertContains(response, 'inlines.js')
self.assertNotContains(response, 'inlines.min.js')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class SaveAsTests(TestCase):
fixtures = ['admin-views-users.xml', 'admin-views-person.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_save_as_duplication(self):
"""Ensure save as actually creates a new person"""
post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42}
self.client.post('/test_admin/admin/admin_views/person/1/', post_data)
self.assertEqual(len(Person.objects.filter(name='John M')), 1)
self.assertEqual(len(Person.objects.filter(id=1)), 1)
def test_save_as_display(self):
"""
Ensure that 'save as' is displayed when activated and after submitting
invalid data aside save_as_new will not show us a form to overwrite the
initial model.
"""
response = self.client.get('/test_admin/admin/admin_views/person/1/')
self.assertTrue(response.context['save_as'])
post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 3, 'alive': 'checked'}
response = self.client.post('/test_admin/admin/admin_views/person/1/', post_data)
self.assertEqual(response.context['form_url'], '/test_admin/admin/admin_views/person/add/')
@override_settings(ROOT_URLCONF="admin_views.urls")
class CustomModelAdminTest(AdminViewBasicTestCase):
urlbit = "admin2"
def test_custom_admin_site_login_form(self):
self.client.logout()
response = self.client.get('/test_admin/admin2/', follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
login = self.client.post('/test_admin/admin2/login/', {
REDIRECT_FIELD_NAME: '/test_admin/admin2/',
'username': 'customform',
'password': 'secret',
}, follow=True)
self.assertIsInstance(login, TemplateResponse)
self.assertEqual(login.status_code, 200)
self.assertContains(login, 'custom form error')
def test_custom_admin_site_login_template(self):
self.client.logout()
response = self.client.get('/test_admin/admin2/', follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/login.html')
self.assertContains(response, 'Hello from a custom login template')
def test_custom_admin_site_logout_template(self):
response = self.client.get('/test_admin/admin2/logout/')
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/logout.html')
self.assertContains(response, 'Hello from a custom logout template')
def test_custom_admin_site_index_view_and_template(self):
try:
response = self.client.get('/test_admin/admin2/')
except TypeError:
self.fail('AdminSite.index_template should accept a list of template paths')
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/index.html')
self.assertContains(response, 'Hello from a custom index template *bar*')
def test_custom_admin_site_app_index_view_and_template(self):
response = self.client.get('/test_admin/admin2/admin_views/')
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/app_index.html')
self.assertContains(response, 'Hello from a custom app_index template')
def test_custom_admin_site_password_change_template(self):
response = self.client.get('/test_admin/admin2/password_change/')
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
self.assertContains(response, 'Hello from a custom password change form template')
def test_custom_admin_site_password_change_with_extra_context(self):
response = self.client.get('/test_admin/admin2/password_change/')
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
self.assertContains(response, 'eggs')
def test_custom_admin_site_password_change_done_template(self):
response = self.client.get('/test_admin/admin2/password_change/done/')
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/password_change_done.html')
self.assertContains(response, 'Hello from a custom password change done template')
def test_custom_admin_site_view(self):
self.client.login(username='super', password='secret')
response = self.client.get('/test_admin/%s/my_view/' % self.urlbit)
self.assertEqual(response.content, b"Django is a magical pony!")
def test_pwd_change_custom_template(self):
self.client.login(username='super', password='secret')
su = User.objects.get(username='super')
try:
response = self.client.get('/test_admin/admin4/auth/user/%s/password/' % su.pk)
except TypeError:
self.fail('ModelAdmin.change_user_password_template should accept a list of template paths')
self.assertEqual(response.status_code, 200)
def get_perm(Model, perm):
"""Return the permission object, for the Model"""
ct = ContentType.objects.get_for_model(Model)
return Permission.objects.get(content_type=ct, codename=perm)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminViewPermissionsTest(TestCase):
"""Tests for Admin Views Permissions."""
fixtures = ['admin-views-users.xml']
def setUp(self):
"""Test setup."""
        # Set up permissions for our users who can add, change, and delete.
# We can't put this into the fixture, because the content type id
# and the permission id could be different on each run of the test.
opts = Article._meta
# User who can add Articles
add_user = User.objects.get(username='adduser')
add_user.user_permissions.add(get_perm(Article,
get_permission_codename('add', opts)))
# User who can change Articles
change_user = User.objects.get(username='changeuser')
change_user.user_permissions.add(get_perm(Article,
get_permission_codename('change', opts)))
# User who can delete Articles
delete_user = User.objects.get(username='deleteuser')
delete_user.user_permissions.add(get_perm(Article,
get_permission_codename('delete', opts)))
delete_user.user_permissions.add(get_perm(Section,
get_permission_codename('delete', Section._meta)))
# login POST dicts
self.super_login = {
REDIRECT_FIELD_NAME: '/test_admin/admin/',
'username': 'super',
'password': 'secret',
}
self.super_email_login = {
REDIRECT_FIELD_NAME: '/test_admin/admin/',
'username': 'super@example.com',
'password': 'secret',
}
self.super_email_bad_login = {
REDIRECT_FIELD_NAME: '/test_admin/admin/',
'username': 'super@example.com',
'password': 'notsecret',
}
self.adduser_login = {
REDIRECT_FIELD_NAME: '/test_admin/admin/',
'username': 'adduser',
'password': 'secret',
}
self.changeuser_login = {
REDIRECT_FIELD_NAME: '/test_admin/admin/',
'username': 'changeuser',
'password': 'secret',
}
self.deleteuser_login = {
REDIRECT_FIELD_NAME: '/test_admin/admin/',
'username': 'deleteuser',
'password': 'secret',
}
self.joepublic_login = {
REDIRECT_FIELD_NAME: '/test_admin/admin/',
'username': 'joepublic',
'password': 'secret',
}
self.no_username_login = {
REDIRECT_FIELD_NAME: '/test_admin/admin/',
'password': 'secret',
}
def test_login(self):
"""
Make sure only staff members can log in.
Successful posts to the login page will redirect to the original url.
Unsuccessful attempts will continue to render the login page with
a 200 status code.
"""
login_url = reverse('admin:login') + '?next=/test_admin/admin/'
# Super User
response = self.client.get('/test_admin/admin/')
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, '/test_admin/admin/')
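        # A successful login returns a redirect, which renders no template,
        # so the response carries no context.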
self.assertFalse(login.context)
self.client.get('/test_admin/admin/logout/')
        # Test login when the user enters an email address as the username
response = self.client.get('/test_admin/admin/')
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
# only correct passwords get a username hint
login = self.client.post(login_url, self.super_email_bad_login)
self.assertContains(login, ERROR_MESSAGE)
new_user = User(username='jondoe', password='secret', email='super@example.com')
new_user.save()
        # check that a login matching multiple email addresses doesn't get a 500
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
# Add User
response = self.client.get('/test_admin/admin/')
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.adduser_login)
self.assertRedirects(login, '/test_admin/admin/')
self.assertFalse(login.context)
self.client.get('/test_admin/admin/logout/')
# Change User
response = self.client.get('/test_admin/admin/')
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.changeuser_login)
self.assertRedirects(login, '/test_admin/admin/')
self.assertFalse(login.context)
self.client.get('/test_admin/admin/logout/')
# Delete User
response = self.client.get('/test_admin/admin/')
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.deleteuser_login)
self.assertRedirects(login, '/test_admin/admin/')
self.assertFalse(login.context)
self.client.get('/test_admin/admin/logout/')
        # Regular User should not be able to log in.
response = self.client.get('/test_admin/admin/')
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.joepublic_login)
self.assertEqual(login.status_code, 200)
self.assertContains(login, ERROR_MESSAGE)
# Requests without username should not return 500 errors.
response = self.client.get('/test_admin/admin/')
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.no_username_login)
self.assertEqual(login.status_code, 200)
form = login.context[0].get('form')
self.assertEqual(form.errors['username'][0], 'This field is required.')
def test_login_successfully_redirects_to_original_URL(self):
response = self.client.get('/test_admin/admin/')
self.assertEqual(response.status_code, 302)
query_string = 'the-answer=42'
redirect_url = '/test_admin/admin/?%s' % query_string
new_next = {REDIRECT_FIELD_NAME: redirect_url}
post_data = self.super_login.copy()
post_data.pop(REDIRECT_FIELD_NAME)
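        # The redirect target travels in the login URL's query string here,
        # not in the POST body (it was popped above).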
login = self.client.post(
'%s?%s' % (reverse('admin:login'), urlencode(new_next)),
post_data)
self.assertRedirects(login, redirect_url)
def test_double_login_is_not_allowed(self):
"""Regression test for #19327"""
login_url = reverse('admin:login') + '?next=/test_admin/admin/'
response = self.client.get('/test_admin/admin/')
self.assertEqual(response.status_code, 302)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, '/test_admin/admin/')
self.assertFalse(login.context)
# Logging in with non-admin user fails
login = self.client.post(login_url, self.joepublic_login)
self.assertEqual(login.status_code, 200)
self.assertContains(login, ERROR_MESSAGE)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, '/test_admin/admin/')
self.assertFalse(login.context)
# Logging in with admin user while already logged in
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, '/test_admin/admin/')
self.assertFalse(login.context)
self.client.get('/test_admin/admin/logout/')
def test_add_view(self):
"""Test add view restricts access and actually adds items."""
login_url = reverse('admin:login') + '?next=/test_admin/admin/'
add_dict = {'title': 'Døm ikke',
'content': '<p>great article</p>',
'date_0': '2008-03-18', 'date_1': '10:54:39',
'section': 1}
# Change User should not have access to add articles
self.client.get('/test_admin/admin/')
self.client.post(login_url, self.changeuser_login)
        # make sure the view removes the test cookie
self.assertEqual(self.client.session.test_cookie_worked(), False)
response = self.client.get('/test_admin/admin/admin_views/article/add/')
self.assertEqual(response.status_code, 403)
# Try POST just to make sure
post = self.client.post('/test_admin/admin/admin_views/article/add/', add_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.all().count(), 3)
self.client.get('/test_admin/admin/logout/')
        # Add user may log in and POST to the add view, then is redirected to the admin root
self.client.get('/test_admin/admin/')
self.client.post(login_url, self.adduser_login)
addpage = self.client.get('/test_admin/admin/admin_views/article/add/')
change_list_link = '› <a href="/test_admin/admin/admin_views/article/">Articles</a>'
self.assertNotContains(addpage, change_list_link,
msg_prefix='User restricted to add permission is given link to change list view in breadcrumbs.')
post = self.client.post('/test_admin/admin/admin_views/article/add/', add_dict)
self.assertRedirects(post, '/test_admin/admin/')
self.assertEqual(Article.objects.all().count(), 4)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Greetings from a created object')
self.client.get('/test_admin/admin/logout/')
# Super can add too, but is redirected to the change list view
self.client.get('/test_admin/admin/')
self.client.post(login_url, self.super_login)
addpage = self.client.get('/test_admin/admin/admin_views/article/add/')
self.assertContains(addpage, change_list_link,
msg_prefix='Unrestricted user is not given link to change list view in breadcrumbs.')
post = self.client.post('/test_admin/admin/admin_views/article/add/', add_dict)
self.assertRedirects(post, '/test_admin/admin/admin_views/article/')
self.assertEqual(Article.objects.all().count(), 5)
self.client.get('/test_admin/admin/logout/')
        # Refs #8509 - if a normal user is already logged in, it is possible
        # to log in again as the superuser without error
self.client.login(username='joepublic', password='secret')
        # Check that if the user's session expires, data still persists
self.client.get('/test_admin/admin/')
self.client.post(login_url, self.super_login)
        # make sure the view removes the test cookie
self.assertEqual(self.client.session.test_cookie_worked(), False)
def test_change_view(self):
"""Change view should restrict access and allow users to edit items."""
login_url = reverse('admin:login') + '?next=/test_admin/admin/'
change_dict = {'title': 'Ikke fordømt',
'content': '<p>edited article</p>',
'date_0': '2008-03-18', 'date_1': '10:54:39',
'section': 1}
        # add user should not be able to view the list of articles or change any of them
self.client.get('/test_admin/admin/')
self.client.post(login_url, self.adduser_login)
response = self.client.get('/test_admin/admin/admin_views/article/')
self.assertEqual(response.status_code, 403)
response = self.client.get('/test_admin/admin/admin_views/article/1/')
self.assertEqual(response.status_code, 403)
post = self.client.post('/test_admin/admin/admin_views/article/1/', change_dict)
self.assertEqual(post.status_code, 403)
self.client.get('/test_admin/admin/logout/')
# change user can view all items and edit them
self.client.get('/test_admin/admin/')
self.client.post(login_url, self.changeuser_login)
response = self.client.get('/test_admin/admin/admin_views/article/')
self.assertEqual(response.status_code, 200)
response = self.client.get('/test_admin/admin/admin_views/article/1/')
self.assertEqual(response.status_code, 200)
post = self.client.post('/test_admin/admin/admin_views/article/1/', change_dict)
self.assertRedirects(post, '/test_admin/admin/admin_views/article/')
self.assertEqual(Article.objects.get(pk=1).content, '<p>edited article</p>')
# one error in form should produce singular error message, multiple errors plural
change_dict['title'] = ''
post = self.client.post('/test_admin/admin/admin_views/article/1/', change_dict)
self.assertContains(post, 'Please correct the error below.',
msg_prefix='Singular error message not found in response to post with one error')
change_dict['content'] = ''
post = self.client.post('/test_admin/admin/admin_views/article/1/', change_dict)
self.assertContains(post, 'Please correct the errors below.',
msg_prefix='Plural error message not found in response to post with multiple errors')
self.client.get('/test_admin/admin/logout/')
# Test redirection when using row-level change permissions. Refs #11513.
RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
RowLevelChangePermissionModel.objects.create(id=2, name="even id")
for login_dict in [self.super_login, self.changeuser_login, self.adduser_login, self.deleteuser_login]:
self.client.post(login_url, login_dict)
response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/1/')
self.assertEqual(response.status_code, 403)
response = self.client.post('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/1/', {'name': 'changed'})
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
self.assertEqual(response.status_code, 403)
response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/2/')
self.assertEqual(response.status_code, 200)
response = self.client.post('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/2/', {'name': 'changed'})
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
self.assertRedirects(response, '/test_admin/admin/')
self.client.get('/test_admin/admin/logout/')
for login_dict in [self.joepublic_login, self.no_username_login]:
self.client.post(login_url, login_dict)
response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/1/', follow=True)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'login-form')
response = self.client.post('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/1/', {'name': 'changed'}, follow=True)
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'login-form')
response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/2/', follow=True)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'login-form')
response = self.client.post('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/2/', {'name': 'changed again'}, follow=True)
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'login-form')
self.client.get('/test_admin/admin/logout/')
def test_history_view(self):
"""History view should restrict access."""
login_url = reverse('admin:login') + '?next=/test_admin/admin/'
        # add user should not be able to view the list of articles or change any of them
self.client.get('/test_admin/admin/')
self.client.post(login_url, self.adduser_login)
response = self.client.get('/test_admin/admin/admin_views/article/1/history/')
self.assertEqual(response.status_code, 403)
self.client.get('/test_admin/admin/logout/')
# change user can view all items and edit them
self.client.get('/test_admin/admin/')
self.client.post(login_url, self.changeuser_login)
response = self.client.get('/test_admin/admin/admin_views/article/1/history/')
self.assertEqual(response.status_code, 200)
# Test redirection when using row-level change permissions. Refs #11513.
RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
RowLevelChangePermissionModel.objects.create(id=2, name="even id")
for login_dict in [self.super_login, self.changeuser_login, self.adduser_login, self.deleteuser_login]:
self.client.post(login_url, login_dict)
response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/1/history/')
self.assertEqual(response.status_code, 403)
response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/2/history/')
self.assertEqual(response.status_code, 200)
self.client.get('/test_admin/admin/logout/')
for login_dict in [self.joepublic_login, self.no_username_login]:
self.client.post(login_url, login_dict)
response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/1/history/', follow=True)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'login-form')
response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/2/history/', follow=True)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'login-form')
self.client.get('/test_admin/admin/logout/')
def test_conditionally_show_add_section_link(self):
"""
The foreign key widget should only show the "add related" button if the
user has permission to add that related item.
"""
login_url = reverse('admin:login') + '?next=/test_admin/admin/'
# Set up and log in user.
url = '/test_admin/admin/admin_views/article/add/'
add_link_text = ' class="add-another"'
self.client.get('/test_admin/admin/')
self.client.post(login_url, self.adduser_login)
# The add user can't add sections yet, so they shouldn't see the "add
# section" link.
response = self.client.get(url)
self.assertNotContains(response, add_link_text)
# Allow the add user to add sections too. Now they can see the "add
# section" link.
add_user = User.objects.get(username='adduser')
perm = get_perm(Section, get_permission_codename('add', Section._meta))
add_user.user_permissions.add(perm)
response = self.client.get(url)
self.assertContains(response, add_link_text)
def test_custom_model_admin_templates(self):
login_url = reverse('admin:login') + '?next=/test_admin/admin/'
self.client.get('/test_admin/admin/')
self.client.post(login_url, self.super_login)
# Test custom change list template with custom extra context
response = self.client.get('/test_admin/admin/admin_views/customarticle/')
self.assertContains(response, "var hello = 'Hello!';")
self.assertTemplateUsed(response, 'custom_admin/change_list.html')
# Test custom add form template
response = self.client.get('/test_admin/admin/admin_views/customarticle/add/')
self.assertTemplateUsed(response, 'custom_admin/add_form.html')
# Add an article so we can test delete, change, and history views
post = self.client.post('/test_admin/admin/admin_views/customarticle/add/', {
'content': '<p>great article</p>',
'date_0': '2008-03-18',
'date_1': '10:54:39'
})
self.assertRedirects(post, '/test_admin/admin/admin_views/customarticle/')
self.assertEqual(CustomArticle.objects.all().count(), 1)
article_pk = CustomArticle.objects.all()[0].pk
# Test custom delete, change, and object history templates
# Test custom change form template
response = self.client.get('/test_admin/admin/admin_views/customarticle/%d/' % article_pk)
self.assertTemplateUsed(response, 'custom_admin/change_form.html')
response = self.client.get('/test_admin/admin/admin_views/customarticle/%d/delete/' % article_pk)
self.assertTemplateUsed(response, 'custom_admin/delete_confirmation.html')
response = self.client.post('/test_admin/admin/admin_views/customarticle/', data={
'index': 0,
'action': ['delete_selected'],
'_selected_action': ['1'],
})
self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html')
response = self.client.get('/test_admin/admin/admin_views/customarticle/%d/history/' % article_pk)
self.assertTemplateUsed(response, 'custom_admin/object_history.html')
self.client.get('/test_admin/admin/logout/')
def test_delete_view(self):
"""Delete view should restrict access and actually delete items."""
login_url = reverse('admin:login') + '?next=/test_admin/admin/'
delete_dict = {'post': 'yes'}
# add user should not be able to delete articles
self.client.get('/test_admin/admin/')
self.client.post(login_url, self.adduser_login)
response = self.client.get('/test_admin/admin/admin_views/article/1/delete/')
self.assertEqual(response.status_code, 403)
post = self.client.post('/test_admin/admin/admin_views/article/1/delete/', delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.all().count(), 3)
self.client.get('/test_admin/admin/logout/')
# Delete user can delete
self.client.get('/test_admin/admin/')
self.client.post(login_url, self.deleteuser_login)
response = self.client.get('/test_admin/admin/admin_views/section/1/delete/')
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 3</li>")
# test response contains link to related Article
self.assertContains(response, "admin_views/article/1/")
response = self.client.get('/test_admin/admin/admin_views/article/1/delete/')
self.assertContains(response, "admin_views/article/1/")
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 1</li>")
self.assertEqual(response.status_code, 200)
post = self.client.post('/test_admin/admin/admin_views/article/1/delete/', delete_dict)
self.assertRedirects(post, '/test_admin/admin/')
self.assertEqual(Article.objects.all().count(), 2)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Greetings from a deleted object')
article_ct = ContentType.objects.get_for_model(Article)
logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION)
self.assertEqual(logged.object_id, '1')
self.client.get('/test_admin/admin/logout/')
def test_disabled_permissions_when_logged_in(self):
self.client.login(username='super', password='secret')
superuser = User.objects.get(username='super')
superuser.is_active = False
superuser.save()
response = self.client.get('/test_admin/admin/', follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, 'Log out')
response = self.client.get('/test_admin/admin/secure-view/', follow=True)
self.assertContains(response, 'id="login-form"')
def test_disabled_staff_permissions_when_logged_in(self):
self.client.login(username='super', password='secret')
superuser = User.objects.get(username='super')
superuser.is_staff = False
superuser.save()
response = self.client.get('/test_admin/admin/', follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, 'Log out')
response = self.client.get('/test_admin/admin/secure-view/', follow=True)
self.assertContains(response, 'id="login-form"')
def test_app_index_fail_early(self):
"""
If a user has no module perms, avoid iterating over all the modeladmins
in the registry.
"""
login_url = reverse('admin:login') + '?next=/test_admin/admin/'
opts = Article._meta
change_user = User.objects.get(username='changeuser')
permission = get_perm(Article, get_permission_codename('change', opts))
self.client.post(login_url, self.changeuser_login)
        # with the permission removed, the user has no module permissions
change_user.user_permissions.remove(permission)
response = self.client.get('/test_admin/admin/admin_views/')
self.assertEqual(response.status_code, 403)
# the user now has module permissions
change_user.user_permissions.add(permission)
response = self.client.get('/test_admin/admin/admin_views/')
self.assertEqual(response.status_code, 200)
def test_shortcut_view_only_available_to_staff(self):
"""
Only admin users should be able to use the admin shortcut view.
"""
model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey)
obj = ModelWithStringPrimaryKey.objects.create(string_pk='foo')
shortcut_url = "/test_admin/admin/r/%s/%s/" % (model_ctype.pk, obj.pk)
# Not logged in: we should see the login page.
response = self.client.get(shortcut_url, follow=True)
self.assertTemplateUsed(response, 'admin/login.html')
# Logged in? Redirect.
self.client.login(username='super', password='secret')
response = self.client.get(shortcut_url, follow=False)
# Can't use self.assertRedirects() because User.get_absolute_url() is silly.
self.assertEqual(response.status_code, 302)
        # The domain may vary depending on whether the contrib.sites tests also run
six.assertRegex(self, response.url, 'http://(testserver|example.com)/dummy/foo/')
def test_has_module_permission(self):
"""
Ensure that has_module_permission() returns True for all users who
have any permission for that module (add, change, or delete), so that
the module is displayed on the admin index page.
"""
login_url = reverse('admin:login') + '?next=/test_admin/admin/'
self.client.post(login_url, self.super_login)
response = self.client.get('/test_admin/admin/')
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.get('/test_admin/admin/logout/')
self.client.post(login_url, self.adduser_login)
response = self.client.get('/test_admin/admin/')
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.get('/test_admin/admin/logout/')
self.client.post(login_url, self.changeuser_login)
response = self.client.get('/test_admin/admin/')
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.get('/test_admin/admin/logout/')
self.client.post(login_url, self.deleteuser_login)
response = self.client.get('/test_admin/admin/')
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.get('/test_admin/admin/logout/')
def test_overriding_has_module_permission(self):
"""
Ensure that overriding has_module_permission() has the desired effect.
In this case, it always returns False, so the module should not be
displayed on the admin index page for any users.
"""
login_url = reverse('admin:login') + '?next=/test_admin/admin7/'
self.client.post(login_url, self.super_login)
response = self.client.get('/test_admin/admin7/')
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, 'Articles')
self.client.get('/test_admin/admin7/logout/')
self.client.post(login_url, self.adduser_login)
response = self.client.get('/test_admin/admin7/')
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, 'Articles')
self.client.get('/test_admin/admin7/logout/')
self.client.post(login_url, self.changeuser_login)
response = self.client.get('/test_admin/admin7/')
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, 'Articles')
self.client.get('/test_admin/admin7/logout/')
self.client.post(login_url, self.deleteuser_login)
response = self.client.get('/test_admin/admin7/')
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, 'Articles')
self.client.get('/test_admin/admin7/logout/')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminViewsNoUrlTest(TestCase):
"""Regression test for #17333"""
fixtures = ['admin-views-users.xml']
def setUp(self):
opts = Report._meta
# User who can change Reports
change_user = User.objects.get(username='changeuser')
change_user.user_permissions.add(get_perm(Report,
get_permission_codename('change', opts)))
# login POST dict
self.changeuser_login = {
REDIRECT_FIELD_NAME: '/test_admin/admin/',
'username': 'changeuser',
'password': 'secret',
}
def test_no_standard_modeladmin_urls(self):
"""Admin index views don't break when user's ModelAdmin removes standard urls"""
self.client.get('/test_admin/admin/')
r = self.client.post(reverse('admin:login'), self.changeuser_login)
r = self.client.get('/test_admin/admin/')
        # we shouldn't get a 500 error caused by a NoReverseMatch
self.assertEqual(r.status_code, 200)
self.client.get('/test_admin/admin/logout/')
@skipUnlessDBFeature('can_defer_constraint_checks')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminViewDeletedObjectsTest(TestCase):
fixtures = ['admin-views-users.xml', 'deleted-objects.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_nesting(self):
"""
Objects should be nested to display the relationships that
cause them to be scheduled for deletion.
"""
pattern = re.compile(br"""<li>Plot: <a href=".+/admin_views/plot/1/">World Domination</a>\s*<ul>\s*<li>Plot details: <a href=".+/admin_views/plotdetails/1/">almost finished</a>""")
response = self.client.get('/test_admin/admin/admin_views/villain/%s/delete/' % quote(1))
six.assertRegex(self, response.content, pattern)
def test_cyclic(self):
"""
Cyclic relationships should still cause each object to only be
listed once.
"""
one = """<li>Cyclic one: <a href="/test_admin/admin/admin_views/cyclicone/1/">I am recursive</a>"""
two = """<li>Cyclic two: <a href="/test_admin/admin/admin_views/cyclictwo/1/">I am recursive too</a>"""
response = self.client.get('/test_admin/admin/admin_views/cyclicone/%s/delete/' % quote(1))
self.assertContains(response, one, 1)
self.assertContains(response, two, 1)
def test_perms_needed(self):
self.client.logout()
delete_user = User.objects.get(username='deleteuser')
delete_user.user_permissions.add(get_perm(Plot,
get_permission_codename('delete', Plot._meta)))
self.assertTrue(self.client.login(username='deleteuser',
password='secret'))
response = self.client.get('/test_admin/admin/admin_views/plot/%s/delete/' % quote(1))
self.assertContains(response, "your account doesn't have permission to delete the following types of objects")
self.assertContains(response, "<li>plot details</li>")
def test_protected(self):
q = Question.objects.create(question="Why?")
a1 = Answer.objects.create(question=q, answer="Because.")
a2 = Answer.objects.create(question=q, answer="Yes.")
response = self.client.get("/test_admin/admin/admin_views/question/%s/delete/" % quote(q.pk))
self.assertContains(response, "would require deleting the following protected related objects")
self.assertContains(response, '<li>Answer: <a href="/test_admin/admin/admin_views/answer/%s/">Because.</a></li>' % a1.pk)
self.assertContains(response, '<li>Answer: <a href="/test_admin/admin/admin_views/answer/%s/">Yes.</a></li>' % a2.pk)
def test_not_registered(self):
should_contain = """<li>Secret hideout: underground bunker"""
response = self.client.get('/test_admin/admin/admin_views/villain/%s/delete/' % quote(1))
self.assertContains(response, should_contain, 1)
def test_multiple_fkeys_to_same_model(self):
"""
If a deleted object has two relationships from another model,
both of those should be followed in looking for related
objects to delete.
"""
should_contain = """<li>Plot: <a href="/test_admin/admin/admin_views/plot/1/">World Domination</a>"""
response = self.client.get('/test_admin/admin/admin_views/villain/%s/delete/' % quote(1))
self.assertContains(response, should_contain)
response = self.client.get('/test_admin/admin/admin_views/villain/%s/delete/' % quote(2))
self.assertContains(response, should_contain)
def test_multiple_fkeys_to_same_instance(self):
"""
If a deleted object has two relationships pointing to it from
another object, the other object should still only be listed
once.
"""
should_contain = """<li>Plot: <a href="/test_admin/admin/admin_views/plot/2/">World Peace</a></li>"""
response = self.client.get('/test_admin/admin/admin_views/villain/%s/delete/' % quote(2))
self.assertContains(response, should_contain, 1)
def test_inheritance(self):
"""
In the case of an inherited model, if either the child or
parent-model instance is deleted, both instances are listed
for deletion, as well as any relationships they have.
"""
should_contain = [
"""<li>Villain: <a href="/test_admin/admin/admin_views/villain/3/">Bob</a>""",
"""<li>Super villain: <a href="/test_admin/admin/admin_views/supervillain/3/">Bob</a>""",
"""<li>Secret hideout: floating castle""",
"""<li>Super secret hideout: super floating castle!"""
]
response = self.client.get('/test_admin/admin/admin_views/villain/%s/delete/' % quote(3))
for should in should_contain:
self.assertContains(response, should, 1)
response = self.client.get('/test_admin/admin/admin_views/supervillain/%s/delete/' % quote(3))
for should in should_contain:
self.assertContains(response, should, 1)
def test_generic_relations(self):
"""
If a deleted object has GenericForeignKeys pointing to it,
those objects should be listed for deletion.
"""
plot = Plot.objects.get(pk=3)
FunkyTag.objects.create(content_object=plot, name='hott')
should_contain = """<li>Funky tag: <a href="/test_admin/admin/admin_views/funkytag/1/">hott"""
response = self.client.get('/test_admin/admin/admin_views/plot/%s/delete/' % quote(3))
self.assertContains(response, should_contain)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class TestGenericRelations(TestCase):
fixtures = ['admin-views-users.xml', 'deleted-objects.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def test_generic_content_object_in_list_display(self):
plot = Plot.objects.get(pk=3)
FunkyTag.objects.create(content_object=plot, name='hott')
response = self.client.get('/test_admin/admin/admin_views/funkytag/')
self.assertContains(response, "%s</td>" % plot)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminViewStringPrimaryKeyTest(TestCase):
fixtures = ['admin-views-users.xml', 'string-primary-key.xml']
def __init__(self, *args):
super(AdminViewStringPrimaryKeyTest, self).__init__(*args)
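        # A pk packed with URL- and HTML-special characters to exercise
        # quoting and escaping throughout the admin.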
self.pk = """abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 -_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`"""
def setUp(self):
self.client.login(username='super', password='secret')
content_type_pk = ContentType.objects.get_for_model(ModelWithStringPrimaryKey).pk
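        # LogEntry.objects.log_action(user_id, content_type_id, object_id,
        # object_repr, action_flag, ...); action_flag=2 means CHANGE.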
LogEntry.objects.log_action(100, content_type_pk, self.pk, self.pk, 2, change_message='Changed something')
def tearDown(self):
self.client.logout()
def test_get_history_view(self):
"""
        Retrieving the history for an object using the urlencoded form of its
        primary key should work.
Refs #12349, #18550.
"""
response = self.client.get('/test_admin/admin/admin_views/modelwithstringprimarykey/%s/history/' % quote(self.pk))
self.assertContains(response, escape(self.pk))
self.assertContains(response, 'Changed something')
self.assertEqual(response.status_code, 200)
def test_get_change_view(self):
"Retrieving the object using urlencoded form of primary key should work"
response = self.client.get('/test_admin/admin/admin_views/modelwithstringprimarykey/%s/' % quote(self.pk))
self.assertContains(response, escape(self.pk))
self.assertEqual(response.status_code, 200)
def test_changelist_to_changeform_link(self):
"Link to the changeform of the object in changelist should use reverse() and be quoted -- #18072"
prefix = '/test_admin/admin/admin_views/modelwithstringprimarykey/'
response = self.client.get(prefix)
# this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
pk_final_url = escape(iri_to_uri(quote(self.pk)))
should_contain = """<th class="field-__str__"><a href="%s%s/">%s</a></th>""" % (prefix, pk_final_url, escape(self.pk))
self.assertContains(response, should_contain)
def test_recentactions_link(self):
"The link from the recent actions list referring to the changeform of the object should be quoted"
response = self.client.get('/test_admin/admin/')
link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
self.assertContains(response, should_contain)
def test_recentactions_without_content_type(self):
"If a LogEntry is missing content_type it will not display it in span tag under the hyperlink."
response = self.client.get('/test_admin/admin/')
link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
self.assertContains(response, should_contain)
should_contain = "Model with string primary key" # capitalized in Recent Actions
self.assertContains(response, should_contain)
logentry = LogEntry.objects.get(content_type__name__iexact=should_contain)
# http://code.djangoproject.com/ticket/10275
# if the log entry doesn't have a content type it should still be
# possible to view the Recent Actions part
logentry.content_type = None
logentry.save()
counted_presence_before = response.content.count(force_bytes(should_contain))
response = self.client.get('/test_admin/admin/')
counted_presence_after = response.content.count(force_bytes(should_contain))
self.assertEqual(counted_presence_before - 1,
counted_presence_after)
def test_logentry_get_admin_url(self):
"LogEntry.get_admin_url returns a URL to edit the entry's object or None for non-existent (possibly deleted) models"
log_entry_name = "Model with string primary key" # capitalized in Recent Actions
logentry = LogEntry.objects.get(content_type__name__iexact=log_entry_name)
model = "modelwithstringprimarykey"
desired_admin_url = "/test_admin/admin/admin_views/%s/%s/" % (model, iri_to_uri(quote(self.pk)))
self.assertEqual(logentry.get_admin_url(), desired_admin_url)
logentry.content_type.model = "non-existent"
self.assertEqual(logentry.get_admin_url(), None)
def test_deleteconfirmation_link(self):
"The link from the delete confirmation page referring back to the changeform of the object should be quoted"
response = self.client.get('/test_admin/admin/admin_views/modelwithstringprimarykey/%s/delete/' % quote(self.pk))
# this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
should_contain = """/%s/">%s</a>""" % (escape(iri_to_uri(quote(self.pk))), escape(self.pk))
self.assertContains(response, should_contain)
def test_url_conflicts_with_add(self):
"A model with a primary key that ends with add should be visible"
add_model = ModelWithStringPrimaryKey(pk="i have something to add")
add_model.save()
response = self.client.get('/test_admin/admin/admin_views/modelwithstringprimarykey/%s/' % quote(add_model.pk))
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_url_conflicts_with_delete(self):
"A model with a primary key that ends with delete should be visible"
delete_model = ModelWithStringPrimaryKey(pk="delete")
delete_model.save()
response = self.client.get('/test_admin/admin/admin_views/modelwithstringprimarykey/%s/' % quote(delete_model.pk))
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_url_conflicts_with_history(self):
"A model with a primary key that ends with history should be visible"
history_model = ModelWithStringPrimaryKey(pk="history")
history_model.save()
response = self.client.get('/test_admin/admin/admin_views/modelwithstringprimarykey/%s/' % quote(history_model.pk))
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_shortcut_view_with_escaping(self):
"'View on site should' work properly with char fields"
model = ModelWithStringPrimaryKey(pk='abc_123')
model.save()
response = self.client.get('/test_admin/admin/admin_views/modelwithstringprimarykey/%s/' % quote(model.pk))
should_contain = '/%s/" class="viewsitelink">' % model.pk
self.assertContains(response, should_contain)
def test_change_view_history_link(self):
"""Object history button link should work and contain the pk value quoted."""
url = reverse('admin:%s_modelwithstringprimarykey_change' %
ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),))
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
expected_link = reverse('admin:%s_modelwithstringprimarykey_history' %
ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),))
self.assertContains(response, '<a href="%s" class="historylink"' % expected_link)
def test_redirect_on_add_view_continue_button(self):
"""As soon as an object is added using "Save and continue editing"
button, the user should be redirected to the object's change_view.
In case primary key is a string containing some special characters
like slash or underscore, these characters must be escaped (see #22266)
"""
response = self.client.post(
'/test_admin/admin/admin_views/modelwithstringprimarykey/add/',
{
'string_pk': '123/history',
"_continue": "1", # Save and continue editing
}
)
self.assertEqual(response.status_code, 302) # temporary redirect
self.assertEqual(
response['location'],
(
'http://testserver/test_admin/admin/admin_views/'
'modelwithstringprimarykey/123_2Fhistory/' # PK is quoted
)
)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class SecureViewTests(TestCase):
"""
Test behavior of a view protected by the staff_member_required decorator.
"""
fixtures = ['admin-views-users.xml']
def tearDown(self):
self.client.logout()
def test_secure_view_shows_login_if_not_logged_in(self):
"""
Ensure that we see the admin login form.
"""
secure_url = '/test_admin/admin/secure-view/'
response = self.client.get(secure_url)
self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), secure_url))
response = self.client.get(secure_url, follow=True)
self.assertTemplateUsed(response, 'admin/login.html')
self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminViewUnicodeTest(TestCase):
fixtures = ['admin-views-unicode.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_unicode_edit(self):
"""
A test to ensure that POST on edit_view handles non-ASCII characters.
"""
post_data = {
"name": "Test lærdommer",
# inline data
"chapter_set-TOTAL_FORMS": "6",
"chapter_set-INITIAL_FORMS": "3",
"chapter_set-MAX_NUM_FORMS": "0",
"chapter_set-0-id": "1",
"chapter_set-0-title": "Norske bostaver æøå skaper problemer",
"chapter_set-0-content": "<p>Svært frustrerende med UnicodeDecodeError</p>",
"chapter_set-1-id": "2",
"chapter_set-1-title": "Kjærlighet.",
"chapter_set-1-content": "<p>La kjærligheten til de lidende seire.</p>",
"chapter_set-2-id": "3",
"chapter_set-2-title": "Need a title.",
"chapter_set-2-content": "<p>Newest content</p>",
"chapter_set-3-id": "",
"chapter_set-3-title": "",
"chapter_set-3-content": "",
"chapter_set-4-id": "",
"chapter_set-4-title": "",
"chapter_set-4-content": "",
"chapter_set-5-id": "",
"chapter_set-5-title": "",
"chapter_set-5-content": "",
}
response = self.client.post('/test_admin/admin/admin_views/book/1/', post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_unicode_delete(self):
"""
Ensure that the delete_view handles non-ASCII characters
"""
delete_dict = {'post': 'yes'}
response = self.client.get('/test_admin/admin/admin_views/book/1/delete/')
self.assertEqual(response.status_code, 200)
response = self.client.post('/test_admin/admin/admin_views/book/1/delete/', delete_dict)
self.assertRedirects(response, '/test_admin/admin/admin_views/book/')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminViewListEditable(TestCase):
fixtures = ['admin-views-users.xml', 'admin-views-person.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_inheritance(self):
Podcast.objects.create(name="This Week in Django",
release_date=datetime.date.today())
response = self.client.get('/test_admin/admin/admin_views/podcast/')
self.assertEqual(response.status_code, 200)
def test_inheritance_2(self):
Vodcast.objects.create(name="This Week in Django", released=True)
response = self.client.get('/test_admin/admin/admin_views/vodcast/')
self.assertEqual(response.status_code, 200)
def test_custom_pk(self):
Language.objects.create(iso='en', name='English', english_name='English')
response = self.client.get('/test_admin/admin/admin_views/language/')
self.assertEqual(response.status_code, 200)
def test_changelist_input_html(self):
response = self.client.get('/test_admin/admin/admin_views/person/')
        # 2 inputs per object (the field and the hidden id field) = 6
# 4 management hidden fields = 4
# 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
# main form submit button = 1
# search field and search submit button = 2
# CSRF field = 1
# field to track 'select all' across paginated views = 1
# 6 + 4 + 4 + 1 + 2 + 1 + 1 = 19 inputs
self.assertContains(response, "<input", count=19)
        # 1 select per object = 3 selects, plus the action <select> = 4
self.assertContains(response, "<select", count=4)
def test_post_messages(self):
# Ticket 12707: Saving inline editable should not show admin
# action warnings
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"_save": "Save",
}
response = self.client.post('/test_admin/admin/admin_views/person/',
data, follow=True)
self.assertEqual(len(response.context['messages']), 1)
def test_post_submission(self):
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"_save": "Save",
}
self.client.post('/test_admin/admin/admin_views/person/', data)
self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
# test a filtered page
data = {
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "2",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "1",
"form-0-gender": "1",
"form-0-alive": "checked",
"form-1-id": "3",
"form-1-gender": "1",
"form-1-alive": "checked",
"_save": "Save",
}
self.client.post('/test_admin/admin/admin_views/person/?gender__exact=1', data)
self.assertEqual(Person.objects.get(name="John Mauchly").alive, True)
# test a searched page
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "1",
"form-0-gender": "1",
"_save": "Save",
}
self.client.post('/test_admin/admin/admin_views/person/?q=john', data)
self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
def test_non_field_errors(self):
        ''' Ensure that non-field errors are displayed for each of the
forms in the changelist's formset. Refs #13126.
'''
fd1 = FoodDelivery.objects.create(reference='123', driver='bill', restaurant='thai')
fd2 = FoodDelivery.objects.create(reference='456', driver='bill', restaurant='india')
fd3 = FoodDelivery.objects.create(reference='789', driver='bill', restaurant='pizza')
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "pizza",
"_save": "Save",
}
response = self.client.post('/test_admin/admin/admin_views/fooddelivery/', data)
self.assertContains(response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery with this Driver and Restaurant already exists.</li></ul></td></tr>', 1, html=True)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
# Same data also.
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "thai",
"_save": "Save",
}
response = self.client.post('/test_admin/admin/admin_views/fooddelivery/', data)
self.assertContains(response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery with this Driver and Restaurant already exists.</li></ul></td></tr>', 2, html=True)
def test_non_form_errors(self):
# test if non-form errors are handled; ticket #12716
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "2",
"form-0-alive": "1",
"form-0-gender": "2",
# Ensure that the form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
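        # The "Grace is not a Zombie" error is raised by custom formset
        # validation defined in the test app's admin (assumed).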
response = self.client.post('/test_admin/admin/admin_views/person/', data)
self.assertContains(response, "Grace is not a Zombie")
def test_non_form_errors_is_errorlist(self):
# test if non-form errors are correctly handled; ticket #12878
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "2",
"form-0-alive": "1",
"form-0-gender": "2",
"_save": "Save",
}
response = self.client.post('/test_admin/admin/admin_views/person/', data)
non_form_errors = response.context['cl'].formset.non_form_errors()
self.assertIsInstance(non_form_errors, ErrorList)
self.assertEqual(str(non_form_errors), str(ErrorList(["Grace is not a Zombie"])))
def test_list_editable_ordering(self):
collector = Collector.objects.create(id=1, name="Frederick Clegg")
Category.objects.create(id=1, order=1, collector=collector)
Category.objects.create(id=2, order=2, collector=collector)
Category.objects.create(id=3, order=0, collector=collector)
Category.objects.create(id=4, order=0, collector=collector)
# NB: The order values must be changed so that the items are reordered.
data = {
"form-TOTAL_FORMS": "4",
"form-INITIAL_FORMS": "4",
"form-MAX_NUM_FORMS": "0",
"form-0-order": "14",
"form-0-id": "1",
"form-0-collector": "1",
"form-1-order": "13",
"form-1-id": "2",
"form-1-collector": "1",
"form-2-order": "1",
"form-2-id": "3",
"form-2-collector": "1",
"form-3-order": "0",
"form-3-id": "4",
"form-3-collector": "1",
# Ensure that the form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post('/test_admin/admin/admin_views/category/', data)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# Check that the order values have been applied to the right objects
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
def test_list_editable_pagination(self):
"""
Ensure that pagination works for list_editable items.
Refs #16819.
"""
UnorderedObject.objects.create(id=1, name='Unordered object #1')
UnorderedObject.objects.create(id=2, name='Unordered object #2')
UnorderedObject.objects.create(id=3, name='Unordered object #3')
response = self.client.get('/test_admin/admin/admin_views/unorderedobject/')
self.assertContains(response, 'Unordered object #3')
self.assertContains(response, 'Unordered object #2')
self.assertNotContains(response, 'Unordered object #1')
response = self.client.get('/test_admin/admin/admin_views/unorderedobject/?p=1')
self.assertNotContains(response, 'Unordered object #3')
self.assertNotContains(response, 'Unordered object #2')
self.assertContains(response, 'Unordered object #1')
def test_list_editable_action_submit(self):
# List editable changes should not be executed if the action "Go" button is
# used to submit the form.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"index": "0",
"_selected_action": ['3'],
"action": ['', 'delete_selected'],
}
self.client.post('/test_admin/admin/admin_views/person/', data)
self.assertEqual(Person.objects.get(name="John Mauchly").alive, True)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1)
def test_list_editable_action_choices(self):
# List editable changes should be executed if the "Save" button is
# used to submit the form - any action choices should be ignored.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"_save": "Save",
"_selected_action": ['1'],
"action": ['', 'delete_selected'],
}
self.client.post('/test_admin/admin/admin_views/person/', data)
self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
def test_list_editable_popup(self):
"""
Fields should not be list-editable in popups.
"""
response = self.client.get('/test_admin/admin/admin_views/person/')
self.assertNotEqual(response.context['cl'].list_editable, ())
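        # Appending IS_POPUP_VAR ("_popup") renders the changelist in popup
        # mode, which disables list_editable.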
response = self.client.get('/test_admin/admin/admin_views/person/?%s' % IS_POPUP_VAR)
self.assertEqual(response.context['cl'].list_editable, ())
def test_pk_hidden_fields(self):
""" Ensure that hidden pk fields aren't displayed in the table body and
that their corresponding human-readable value is displayed instead.
        Note that the hidden pk fields are in fact displayed, but
separately (not in the table), and only once.
Refs #12475.
"""
story1 = Story.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...')
story2 = Story.objects.create(title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...')
response = self.client.get('/test_admin/admin/admin_views/story/')
self.assertContains(response, 'id="id_form-0-id"', 1) # Only one hidden field, in a separate place than the table.
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(response, '<div class="hiddenfields">\n<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /><input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>' % (story2.id, story1.id), html=True)
self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1)
self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1)
def test_pk_hidden_fields_with_list_display_links(self):
""" Similarly as test_pk_hidden_fields, but when the hidden pk fields are
referenced in list_display_links.
Refs #12475.
"""
story1 = OtherStory.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...')
story2 = OtherStory.objects.create(title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...')
link1 = reverse('admin:admin_views_otherstory_change', args=(story1.pk,))
link2 = reverse('admin:admin_views_otherstory_change', args=(story2.pk,))
response = self.client.get('/test_admin/admin/admin_views/otherstory/')
self.assertContains(response, 'id="id_form-0-id"', 1) # Only one hidden field, in a separate place than the table.
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(response, '<div class="hiddenfields">\n<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /><input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>' % (story2.id, story1.id), html=True)
self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id), 1)
self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id), 1)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminSearchTest(TestCase):
fixtures = ['admin-views-users', 'multiple-child-classes',
'admin-views-person']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_search_on_sibling_models(self):
"Check that a search that mentions sibling models"
response = self.client.get('/test_admin/admin/admin_views/recommendation/?q=bar')
# confirm the search returned 1 object
self.assertContains(response, "\n1 recommendation\n")
def test_with_fk_to_field(self):
"""Ensure that the to_field GET parameter is preserved when a search
is performed. Refs #10918.
"""
response = self.client.get('/test_admin/admin/auth/user/?q=joe&%s=id' % TO_FIELD_VAR)
self.assertContains(response, "\n1 user\n")
self.assertContains(response, '<input type="hidden" name="%s" value="id"/>' % TO_FIELD_VAR, html=True)
def test_exact_matches(self):
response = self.client.get('/test_admin/admin/admin_views/recommendation/?q=bar')
# confirm the search returned one object
self.assertContains(response, "\n1 recommendation\n")
response = self.client.get('/test_admin/admin/admin_views/recommendation/?q=ba')
# confirm the search returned zero objects
self.assertContains(response, "\n0 recommendations\n")
def test_beginning_matches(self):
response = self.client.get('/test_admin/admin/admin_views/person/?q=Gui')
# confirm the search returned one object
self.assertContains(response, "\n1 person\n")
self.assertContains(response, "Guido")
response = self.client.get('/test_admin/admin/admin_views/person/?q=uido')
# confirm the search returned zero objects
self.assertContains(response, "\n0 persons\n")
self.assertNotContains(response, "Guido")
def test_pluggable_search(self):
PluggableSearchPerson.objects.create(name="Bob", age=10)
PluggableSearchPerson.objects.create(name="Amy", age=20)
response = self.client.get('/test_admin/admin/admin_views/pluggablesearchperson/?q=Bob')
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Bob")
response = self.client.get('/test_admin/admin/admin_views/pluggablesearchperson/?q=20')
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Amy")
def test_reset_link(self):
"""
Test presence of reset link in search bar ("1 result (_x total_)").
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
# + 1 for total count
with self.assertNumQueries(5):
response = self.client.get('/test_admin/admin/admin_views/person/?q=Gui')
self.assertContains(response,
"""<span class="small quiet">1 result (<a href="?">3 total</a>)</span>""",
html=True)
def test_no_total_count(self):
"""
#8408 -- "Show all" should be displayed instead of the total count if
ModelAdmin.show_full_result_count is False.
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
with self.assertNumQueries(4):
response = self.client.get('/test_admin/admin/admin_views/recommendation/?q=bar')
self.assertContains(response,
"""<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""",
html=True)
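
# Reference sketches for the search behaviours asserted above (names are
# hypothetical; the real admins live in the admin_views test app). The '^'
# prefix restricts matching to the beginning of the field and '=' to exact
# matches, which is why 'Gui' finds Guido while 'uido' and 'ba' find
# nothing. The get_search_results() override is the documented hook behind
# test_pluggable_search:
from django.contrib import admin


class PersonSearchAdminSketch(admin.ModelAdmin):
    search_fields = ('^name',)


class RecommendationAdminSketch(admin.ModelAdmin):
    search_fields = ('=titletranslation__text',)
    show_full_result_count = False   # "Show all" instead of the total count


class PluggableSearchAdminSketch(admin.ModelAdmin):
    search_fields = ('name',)

    def get_search_results(self, request, queryset, search_term):
        queryset, use_distinct = super(PluggableSearchAdminSketch, self).get_search_results(
            request, queryset, search_term)
        try:
            # Also match a purely numeric search term against the age field.
            queryset |= self.model.objects.filter(age=int(search_term))
        except ValueError:
            pass
        return queryset, use_distinct
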
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminInheritedInlinesTest(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_inline(self):
"Ensure that inline models which inherit from a common parent are correctly handled by admin."
foo_user = "foo username"
bar_user = "bar username"
name_re = re.compile(b'name="(.*?)"')
        # test the add case: render the form first
response = self.client.get('/test_admin/admin/admin_views/persona/add/')
names = name_re.findall(response.content)
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
# test the add case
post_data = {
"name": "Test Name",
# inline data
"accounts-TOTAL_FORMS": "1",
"accounts-INITIAL_FORMS": "0",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": foo_user,
"accounts-2-TOTAL_FORMS": "1",
"accounts-2-INITIAL_FORMS": "0",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": bar_user,
}
response = self.client.post('/test_admin/admin/admin_views/persona/add/', post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
persona_id = Persona.objects.all()[0].id
foo_id = FooAccount.objects.all()[0].id
bar_id = BarAccount.objects.all()[0].id
# test the edit case
response = self.client.get('/test_admin/admin/admin_views/persona/%d/' % persona_id)
names = name_re.findall(response.content)
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
post_data = {
"name": "Test Name",
"accounts-TOTAL_FORMS": "2",
"accounts-INITIAL_FORMS": "1",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": "%s-1" % foo_user,
"accounts-0-account_ptr": str(foo_id),
"accounts-0-persona": str(persona_id),
"accounts-2-TOTAL_FORMS": "2",
"accounts-2-INITIAL_FORMS": "1",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": "%s-1" % bar_user,
"accounts-2-0-account_ptr": str(bar_id),
"accounts-2-0-persona": str(persona_id),
}
response = self.client.post('/test_admin/admin/admin_views/persona/%d/' % persona_id, post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
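
# Sketch of the inline setup test_inline exercises: two inlines whose models
# share a concrete parent. Each inline formset gets its own prefix
# ("accounts" and "accounts-2" in the POST data above), which is what keeps
# the HTML field names unique. A plausible configuration (the real one lives
# in the admin_views test app):
from django.contrib import admin


class FooAccountInlineSketch(admin.StackedInline):
    model = FooAccount    # FooAccount and BarAccount both subclass Account
    extra = 1


class BarAccountInlineSketch(admin.StackedInline):
    model = BarAccount
    extra = 1


class PersonaAdminSketch(admin.ModelAdmin):
    inlines = (FooAccountInlineSketch, BarAccountInlineSketch)
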
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminActionsTest(TestCase):
fixtures = ['admin-views-users.xml', 'admin-views-actions.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_model_admin_custom_action(self):
"Tests a custom action defined in a ModelAdmin method"
action_data = {
ACTION_CHECKBOX_NAME: [1],
'action': 'mail_admin',
'index': 0,
}
self.client.post('/test_admin/admin/admin_views/subscriber/', action_data)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Greetings from a ModelAdmin action')
def test_model_admin_default_delete_action(self):
"Tests the default delete action defined as a ModelAdmin method"
action_data = {
ACTION_CHECKBOX_NAME: [1, 2],
'action': 'delete_selected',
'index': 0,
}
delete_confirmation_data = {
ACTION_CHECKBOX_NAME: [1, 2],
'action': 'delete_selected',
'post': 'yes',
}
confirmation = self.client.post('/test_admin/admin/admin_views/subscriber/', action_data)
self.assertIsInstance(confirmation, TemplateResponse)
self.assertContains(confirmation, "Are you sure you want to delete the selected subscribers?")
self.assertContains(confirmation, "<h2>Summary</h2>")
self.assertContains(confirmation, "<li>Subscribers: 3</li>")
self.assertContains(confirmation, "<li>External subscribers: 1</li>")
self.assertContains(confirmation, ACTION_CHECKBOX_NAME, count=2)
self.client.post('/test_admin/admin/admin_views/subscriber/', delete_confirmation_data)
self.assertEqual(Subscriber.objects.count(), 0)
@override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
def test_non_localized_pk(self):
"""If USE_THOUSAND_SEPARATOR is set, make sure that the ids for
the objects selected for deletion are rendered without separators.
Refs #14895.
"""
subscriber = Subscriber.objects.get(id=1)
subscriber.id = 9999
subscriber.save()
action_data = {
ACTION_CHECKBOX_NAME: [9999, 2],
'action': 'delete_selected',
'index': 0,
}
response = self.client.post('/test_admin/admin/admin_views/subscriber/', action_data)
self.assertTemplateUsed(response, 'admin/delete_selected_confirmation.html')
self.assertContains(response, 'value="9999"') # Instead of 9,999
self.assertContains(response, 'value="2"')
def test_model_admin_default_delete_action_protected(self):
"""
Tests the default delete action defined as a ModelAdmin method in the
case where some related objects are protected from deletion.
"""
q1 = Question.objects.create(question="Why?")
a1 = Answer.objects.create(question=q1, answer="Because.")
a2 = Answer.objects.create(question=q1, answer="Yes.")
q2 = Question.objects.create(question="Wherefore?")
action_data = {
ACTION_CHECKBOX_NAME: [q1.pk, q2.pk],
'action': 'delete_selected',
'index': 0,
}
response = self.client.post("/test_admin/admin/admin_views/question/", action_data)
self.assertContains(response, "would require deleting the following protected related objects")
self.assertContains(response, '<li>Answer: <a href="/test_admin/admin/admin_views/answer/%s/">Because.</a></li>' % a1.pk, html=True)
self.assertContains(response, '<li>Answer: <a href="/test_admin/admin/admin_views/answer/%s/">Yes.</a></li>' % a2.pk, html=True)
def test_model_admin_default_delete_action_no_change_url(self):
"""
Default delete action shouldn't break if a user's ModelAdmin removes the url for change_view.
Regression test for #20640
"""
obj = UnchangeableObject.objects.create()
action_data = {
ACTION_CHECKBOX_NAME: obj.pk,
"action": "delete_selected",
"index": "0",
}
response = self.client.post('/test_admin/admin/admin_views/unchangeableobject/', action_data)
# No 500 caused by NoReverseMatch
self.assertEqual(response.status_code, 200)
# The page shouldn't display a link to the nonexistent change page
self.assertContains(response, "<li>Unchangeable object: UnchangeableObject object</li>", 1, html=True)
def test_custom_function_mail_action(self):
"Tests a custom action defined in a function"
action_data = {
ACTION_CHECKBOX_NAME: [1],
'action': 'external_mail',
'index': 0,
}
self.client.post('/test_admin/admin/admin_views/externalsubscriber/', action_data)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action')
def test_custom_function_action_with_redirect(self):
"Tests a custom action defined in a function"
action_data = {
ACTION_CHECKBOX_NAME: [1],
'action': 'redirect_to',
'index': 0,
}
response = self.client.post('/test_admin/admin/admin_views/externalsubscriber/', action_data)
self.assertEqual(response.status_code, 302)
def test_default_redirect(self):
"""
Test that actions which don't return an HttpResponse are redirected to
the same page, retaining the querystring (which may contain changelist
information).
"""
action_data = {
ACTION_CHECKBOX_NAME: [1],
'action': 'external_mail',
'index': 0,
}
url = '/test_admin/admin/admin_views/externalsubscriber/?o=1'
response = self.client.post(url, action_data)
self.assertRedirects(response, url)
def test_custom_function_action_streaming_response(self):
"""Tests a custom action that returns a StreamingHttpResponse."""
action_data = {
ACTION_CHECKBOX_NAME: [1],
'action': 'download',
'index': 0,
}
response = self.client.post('/test_admin/admin/admin_views/externalsubscriber/', action_data)
content = b''.join(response.streaming_content)
self.assertEqual(content, b'This is the content of the file')
self.assertEqual(response.status_code, 200)
def test_custom_function_action_no_perm_response(self):
"""Tests a custom action that returns an HttpResponse with 403 code."""
action_data = {
ACTION_CHECKBOX_NAME: [1],
'action': 'no_perm',
'index': 0,
}
response = self.client.post('/test_admin/admin/admin_views/externalsubscriber/', action_data)
self.assertEqual(response.status_code, 403)
self.assertEqual(response.content, b'No permission to perform this action')
def test_actions_ordering(self):
"""
Ensure that actions are ordered as expected.
Refs #15964.
"""
response = self.client.get('/test_admin/admin/admin_views/externalsubscriber/')
self.assertContains(response, '''<label>Action: <select name="action">
<option value="" selected="selected">---------</option>
<option value="delete_selected">Delete selected external
subscribers</option>
<option value="redirect_to">Redirect to (Awesome action)</option>
<option value="external_mail">External mail (Another awesome
action)</option>
<option value="download">Download subscription</option>
<option value="no_perm">No permission to run</option>
</select>''', html=True)
def test_model_without_action(self):
"Tests a ModelAdmin without any action"
response = self.client.get('/test_admin/admin/admin_views/oldsubscriber/')
self.assertEqual(response.context["action_form"], None)
        self.assertNotContains(response, '<input type="checkbox" class="action-select"',
            msg_prefix="Found an unexpected action toggle checkbox in response")
def test_model_without_action_still_has_jquery(self):
"Tests that a ModelAdmin without any actions still gets jQuery included in page"
response = self.client.get('/test_admin/admin/admin_views/oldsubscriber/')
self.assertEqual(response.context["action_form"], None)
self.assertContains(response, 'jquery.min.js',
msg_prefix="jQuery missing from admin pages for model with no admin actions")
def test_action_column_class(self):
"Tests that the checkbox column class is present in the response"
response = self.client.get('/test_admin/admin/admin_views/subscriber/')
self.assertNotEqual(response.context["action_form"], None)
self.assertContains(response, 'action-checkbox-column')
def test_multiple_actions_form(self):
"""
Test that actions come from the form whose submit button was pressed (#10618).
"""
action_data = {
ACTION_CHECKBOX_NAME: [1],
# Two different actions selected on the two forms...
'action': ['external_mail', 'delete_selected'],
# ...but we clicked "go" on the top form.
'index': 0
}
self.client.post('/test_admin/admin/admin_views/externalsubscriber/', action_data)
# Send mail, don't delete.
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action')
def test_user_message_on_none_selected(self):
"""
User should see a warning when 'Go' is pressed and no items are selected.
"""
action_data = {
ACTION_CHECKBOX_NAME: [],
'action': 'delete_selected',
'index': 0,
}
response = self.client.post('/test_admin/admin/admin_views/subscriber/', action_data)
msg = """Items must be selected in order to perform actions on them. No items have been changed."""
self.assertContains(response, msg)
self.assertEqual(Subscriber.objects.count(), 2)
def test_user_message_on_no_action(self):
"""
User should see a warning when 'Go' is pressed and no action is selected.
"""
action_data = {
ACTION_CHECKBOX_NAME: [1, 2],
'action': '',
'index': 0,
}
response = self.client.post('/test_admin/admin/admin_views/subscriber/', action_data)
msg = """No action selected."""
self.assertContains(response, msg)
self.assertEqual(Subscriber.objects.count(), 2)
def test_selection_counter(self):
"""
Check if the selection counter is there.
"""
response = self.client.get('/test_admin/admin/admin_views/subscriber/')
self.assertContains(response, '0 of 2 selected')
def test_popup_actions(self):
""" Actions should not be shown in popups. """
response = self.client.get('/test_admin/admin/admin_views/subscriber/')
self.assertNotEqual(response.context["action_form"], None)
response = self.client.get(
'/test_admin/admin/admin_views/subscriber/?%s' % IS_POPUP_VAR)
self.assertEqual(response.context["action_form"], None)
def test_popup_template_response(self):
"""
Success on popups shall be rendered from template in order to allow
easy customization.
"""
response = self.client.post(
'/test_admin/admin/admin_views/actor/add/?%s=1' % IS_POPUP_VAR,
{'name': 'Troy McClure', 'age': '55', IS_POPUP_VAR: '1'})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.template_name, 'admin/popup_response.html')
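
# Reference sketches for the action styles exercised by AdminActionsTest
# (all names hypothetical; the real actions live in the admin_views test
# app). An action callable receives (modeladmin, request, queryset) and may
# return None (redirect back to the changelist), a redirect, or any other
# HttpResponse, including streaming and 403 responses:
from django.contrib import admin
from django.http import HttpResponseRedirect, StreamingHttpResponse


def external_mail_sketch(modeladmin, request, queryset):
    from django.core import mail
    mail.send_mail('Greetings from a function action', 'Test email',
                   'from@example.com', ['to@example.com'])
external_mail_sketch.short_description = 'External mail (Another awesome action)'


def redirect_to_sketch(modeladmin, request, queryset):
    return HttpResponseRedirect('/some-where-else/')
redirect_to_sketch.short_description = 'Redirect to (Awesome action)'


def download_sketch(modeladmin, request, queryset):
    return StreamingHttpResponse(iter(['This is the content of the file']))
download_sketch.short_description = 'Download subscription'


class ExternalSubscriberAdminSketch(admin.ModelAdmin):
    # Custom actions are offered after the built-in delete_selected, in
    # definition order, matching test_actions_ordering.
    actions = (external_mail_sketch, redirect_to_sketch, download_sketch)
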
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class TestCustomChangeList(TestCase):
fixtures = ['admin-views-users.xml']
urlbit = 'admin'
def setUp(self):
result = self.client.login(username='super', password='secret')
self.assertEqual(result, True)
def tearDown(self):
self.client.logout()
def test_custom_changelist(self):
"""
Validate that a custom ChangeList class can be used (#9749)
"""
# Insert some data
post_data = {"name": "First Gadget"}
response = self.client.post('/test_admin/%s/admin_views/gadget/add/' % self.urlbit, post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
# Hit the page once to get messages out of the queue message list
response = self.client.get('/test_admin/%s/admin_views/gadget/' % self.urlbit)
# Ensure that data is still not visible on the page
response = self.client.get('/test_admin/%s/admin_views/gadget/' % self.urlbit)
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'First Gadget')
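
# Sketch of the get_changelist() hook that test_custom_changelist validates
# (#9749). The real GadgetAdmin presumably installs a ChangeList whose
# queryset excludes everything, which is why "First Gadget" never shows up:
from django.contrib import admin
from django.contrib.admin.views.main import ChangeList


class FilteredChangeListSketch(ChangeList):
    def get_queryset(self, request):
        return self.root_queryset.none()


class GadgetAdminSketch(admin.ModelAdmin):
    def get_changelist(self, request, **kwargs):
        return FilteredChangeListSketch
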
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class TestInlineNotEditable(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
result = self.client.login(username='super', password='secret')
self.assertEqual(result, True)
def tearDown(self):
self.client.logout()
def test_GET_parent_add(self):
"""
InlineModelAdmin broken?
"""
response = self.client.get('/test_admin/admin/admin_views/parent/add/')
self.assertEqual(response.status_code, 200)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminCustomQuerysetTest(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
self.pks = [EmptyModel.objects.create().id for i in range(3)]
self.super_login = {
REDIRECT_FIELD_NAME: '/test_admin/admin/',
'username': 'super',
'password': 'secret',
}
def test_changelist_view(self):
response = self.client.get('/test_admin/admin/admin_views/emptymodel/')
for i in self.pks:
if i > 1:
self.assertContains(response, 'Primary key = %s' % i)
else:
self.assertNotContains(response, 'Primary key = %s' % i)
def test_changelist_view_count_queries(self):
# create 2 Person objects
Person.objects.create(name='person1', gender=1)
Person.objects.create(name='person2', gender=2)
# 4 queries are expected: 1 for the session, 1 for the user,
# 1 for the count and 1 for the objects on the page
with self.assertNumQueries(4):
resp = self.client.get('/test_admin/admin/admin_views/person/')
self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
# here one more count(*) query will run, because filters were applied
with self.assertNumQueries(5):
extra = {'q': 'not_in_name'}
resp = self.client.get('/test_admin/admin/admin_views/person/', extra)
self.assertEqual(resp.context['selection_note'], '0 of 0 selected')
self.assertEqual(resp.context['selection_note_all'], 'All 0 selected')
with self.assertNumQueries(5):
extra = {'q': 'person'}
resp = self.client.get('/test_admin/admin/admin_views/person/', extra)
self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
with self.assertNumQueries(5):
extra = {'gender__exact': '1'}
resp = self.client.get('/test_admin/admin/admin_views/person/', extra)
self.assertEqual(resp.context['selection_note'], '0 of 1 selected')
self.assertEqual(resp.context['selection_note_all'], '1 selected')
def test_change_view(self):
for i in self.pks:
response = self.client.get('/test_admin/admin/admin_views/emptymodel/%s/' % i)
if i > 1:
self.assertEqual(response.status_code, 200)
else:
self.assertEqual(response.status_code, 404)
def test_add_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __unicode__ method
self.assertEqual(CoverLetter.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"author": "Candidate, Best",
"_save": "Save",
}
response = self.client.post('/test_admin/admin/admin_views/coverletter/add/',
post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name
self.assertContains(
response,
'<li class="success">The cover letter "Candidate, Best" was added successfully.</li>',
html=True
)
# model has no __unicode__ method
self.assertEqual(ShortMessage.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"content": "What's this SMS thing?",
"_save": "Save",
}
response = self.client.post('/test_admin/admin/admin_views/shortmessage/add/',
post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name
self.assertContains(
response,
'<li class="success">The short message "ShortMessage object" was added successfully.</li>',
html=True
)
def test_add_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __unicode__ method
self.assertEqual(Telegram.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "Urgent telegram",
"_save": "Save",
}
response = self.client.post('/test_admin/admin/admin_views/telegram/add/',
post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name
self.assertContains(
response,
'<li class="success">The telegram "Urgent telegram" was added successfully.</li>',
html=True
)
# model has no __unicode__ method
self.assertEqual(Paper.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post('/test_admin/admin/admin_views/paper/add/',
post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name
self.assertContains(
response,
'<li class="success">The paper "Paper object" was added successfully.</li>',
html=True
)
def test_edit_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __unicode__ method
cl = CoverLetter.objects.create(author="John Doe")
self.assertEqual(CoverLetter.objects.count(), 1)
response = self.client.get('/test_admin/admin/admin_views/coverletter/%s/' % cl.pk)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"author": "John Doe II",
"_save": "Save",
}
response = self.client.post('/test_admin/admin/admin_views/coverletter/%s/' % cl.pk,
post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name. Instance
# representation is set by model's __unicode__()
self.assertContains(
response,
'<li class="success">The cover letter "John Doe II" was changed successfully.</li>',
html=True
)
# model has no __unicode__ method
sm = ShortMessage.objects.create(content="This is expensive")
self.assertEqual(ShortMessage.objects.count(), 1)
response = self.client.get('/test_admin/admin/admin_views/shortmessage/%s/' % sm.pk)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"content": "Too expensive",
"_save": "Save",
}
response = self.client.post('/test_admin/admin/admin_views/shortmessage/%s/' % sm.pk,
post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by six.text_type()
self.assertContains(
response,
'<li class="success">The short message "ShortMessage_Deferred_timestamp object" was changed successfully.</li>',
html=True
)
def test_edit_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __unicode__ method
t = Telegram.objects.create(title="Frist Telegram")
self.assertEqual(Telegram.objects.count(), 1)
response = self.client.get('/test_admin/admin/admin_views/telegram/%s/' % t.pk)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "Telegram without typo",
"_save": "Save",
}
response = self.client.post('/test_admin/admin/admin_views/telegram/%s/' % t.pk,
post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name. The instance
# representation is set by model's __unicode__()
self.assertContains(
response,
'<li class="success">The telegram "Telegram without typo" was changed successfully.</li>',
html=True
)
# model has no __unicode__ method
p = Paper.objects.create(title="My Paper Title")
self.assertEqual(Paper.objects.count(), 1)
response = self.client.get('/test_admin/admin/admin_views/paper/%s/' % p.pk)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post('/test_admin/admin/admin_views/paper/%s/' % p.pk,
post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by six.text_type()
self.assertContains(
response,
'<li class="success">The paper "Paper_Deferred_author object" was changed successfully.</li>',
html=True
)
def test_history_view_custom_qs(self):
"""
Ensure that custom querysets are considered for the admin history view.
Refs #21013.
"""
self.client.post(reverse('admin:login'), self.super_login)
FilteredManager.objects.create(pk=1)
FilteredManager.objects.create(pk=2)
response = self.client.get('/test_admin/admin/admin_views/filteredmanager/')
self.assertContains(response, "PK=1")
self.assertContains(response, "PK=2")
self.assertEqual(self.client.get('/test_admin/admin/admin_views/filteredmanager/1/history/').status_code, 200)
self.assertEqual(self.client.get('/test_admin/admin/admin_views/filteredmanager/2/history/').status_code, 200)
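
# Sketches of the ModelAdmin.get_queryset() overrides these tests imply
# (field names partly hypothetical). defer()/only() hand back instances of a
# dynamically generated subclass whose default repr produces the "ugly"
# names asserted above ("ShortMessage_Deferred_timestamp object",
# "Paper_Deferred_author object"):
from django.contrib import admin


class EmptyModelAdminSketch(admin.ModelAdmin):
    def get_queryset(self, request):
        # Hide pk <= 1 from the changelist and the change view alike.
        return super(EmptyModelAdminSketch, self).get_queryset(request).filter(pk__gt=1)


class ShortMessageAdminSketch(admin.ModelAdmin):
    def get_queryset(self, request):
        return super(ShortMessageAdminSketch, self).get_queryset(request).defer('timestamp')


class PaperAdminSketch(admin.ModelAdmin):
    def get_queryset(self, request):
        # only('title') defers every other field, including 'author'.
        return super(PaperAdminSketch, self).get_queryset(request).only('title')
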
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminInlineFileUploadTest(TestCase):
fixtures = ['admin-views-users.xml', 'admin-views-actions.xml']
urlbit = 'admin'
def setUp(self):
self.client.login(username='super', password='secret')
# Set up test Picture and Gallery.
# These must be set up here instead of in fixtures in order to allow Picture
# to use a NamedTemporaryFile.
tdir = tempfile.gettempdir()
file1 = tempfile.NamedTemporaryFile(suffix=".file1", dir=tdir)
file1.write(b'a' * (2 ** 21))
filename = file1.name
file1.close()
self.gallery = Gallery(name="Test Gallery")
self.gallery.save()
self.picture = Picture(name="Test Picture", image=filename, gallery=self.gallery)
self.picture.save()
def tearDown(self):
self.client.logout()
def test_inline_file_upload_edit_validation_error_post(self):
"""
Test that inline file uploads correctly display prior data (#10002).
"""
post_data = {
"name": "Test Gallery",
"pictures-TOTAL_FORMS": "2",
"pictures-INITIAL_FORMS": "1",
"pictures-MAX_NUM_FORMS": "0",
"pictures-0-id": six.text_type(self.picture.id),
"pictures-0-gallery": six.text_type(self.gallery.id),
"pictures-0-name": "Test Picture",
"pictures-0-image": "",
"pictures-1-id": "",
"pictures-1-gallery": str(self.gallery.id),
"pictures-1-name": "Test Picture 2",
"pictures-1-image": "",
}
response = self.client.post('/test_admin/%s/admin_views/gallery/%d/' % (self.urlbit, self.gallery.id), post_data)
self.assertContains(response, b"Currently")
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminInlineTests(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
self.post_data = {
"name": "Test Name",
"widget_set-TOTAL_FORMS": "3",
"widget_set-INITIAL_FORMS": "0",
"widget_set-MAX_NUM_FORMS": "0",
"widget_set-0-id": "",
"widget_set-0-owner": "1",
"widget_set-0-name": "",
"widget_set-1-id": "",
"widget_set-1-owner": "1",
"widget_set-1-name": "",
"widget_set-2-id": "",
"widget_set-2-owner": "1",
"widget_set-2-name": "",
"doohickey_set-TOTAL_FORMS": "3",
"doohickey_set-INITIAL_FORMS": "0",
"doohickey_set-MAX_NUM_FORMS": "0",
"doohickey_set-0-owner": "1",
"doohickey_set-0-code": "",
"doohickey_set-0-name": "",
"doohickey_set-1-owner": "1",
"doohickey_set-1-code": "",
"doohickey_set-1-name": "",
"doohickey_set-2-owner": "1",
"doohickey_set-2-code": "",
"doohickey_set-2-name": "",
"grommet_set-TOTAL_FORMS": "3",
"grommet_set-INITIAL_FORMS": "0",
"grommet_set-MAX_NUM_FORMS": "0",
"grommet_set-0-code": "",
"grommet_set-0-owner": "1",
"grommet_set-0-name": "",
"grommet_set-1-code": "",
"grommet_set-1-owner": "1",
"grommet_set-1-name": "",
"grommet_set-2-code": "",
"grommet_set-2-owner": "1",
"grommet_set-2-name": "",
"whatsit_set-TOTAL_FORMS": "3",
"whatsit_set-INITIAL_FORMS": "0",
"whatsit_set-MAX_NUM_FORMS": "0",
"whatsit_set-0-owner": "1",
"whatsit_set-0-index": "",
"whatsit_set-0-name": "",
"whatsit_set-1-owner": "1",
"whatsit_set-1-index": "",
"whatsit_set-1-name": "",
"whatsit_set-2-owner": "1",
"whatsit_set-2-index": "",
"whatsit_set-2-name": "",
"fancydoodad_set-TOTAL_FORMS": "3",
"fancydoodad_set-INITIAL_FORMS": "0",
"fancydoodad_set-MAX_NUM_FORMS": "0",
"fancydoodad_set-0-doodad_ptr": "",
"fancydoodad_set-0-owner": "1",
"fancydoodad_set-0-name": "",
"fancydoodad_set-0-expensive": "on",
"fancydoodad_set-1-doodad_ptr": "",
"fancydoodad_set-1-owner": "1",
"fancydoodad_set-1-name": "",
"fancydoodad_set-1-expensive": "on",
"fancydoodad_set-2-doodad_ptr": "",
"fancydoodad_set-2-owner": "1",
"fancydoodad_set-2-name": "",
"fancydoodad_set-2-expensive": "on",
"category_set-TOTAL_FORMS": "3",
"category_set-INITIAL_FORMS": "0",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "",
"category_set-0-id": "",
"category_set-0-collector": "1",
"category_set-1-order": "",
"category_set-1-id": "",
"category_set-1-collector": "1",
"category_set-2-order": "",
"category_set-2-id": "",
"category_set-2-collector": "1",
}
result = self.client.login(username='super', password='secret')
self.assertEqual(result, True)
self.collector = Collector(pk=1, name='John Fowles')
self.collector.save()
def tearDown(self):
self.client.logout()
def test_simple_inline(self):
"A simple model can be saved as inlines"
# First add a new inline
self.post_data['widget_set-0-name'] = "Widget 1"
collector_url = '/test_admin/admin/admin_views/collector/%d/' % self.collector.pk
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
widget_id = Widget.objects.all()[0].id
# Check that the PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="widget_set-0-id"')
# Now resave that inline
self.post_data['widget_set-INITIAL_FORMS'] = "1"
self.post_data['widget_set-0-id'] = str(widget_id)
self.post_data['widget_set-0-name'] = "Widget 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
# Now modify that inline
self.post_data['widget_set-INITIAL_FORMS'] = "1"
self.post_data['widget_set-0-id'] = str(widget_id)
self.post_data['widget_set-0-name'] = "Widget 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated")
def test_explicit_autofield_inline(self):
"A model with an explicit autofield primary key can be saved as inlines. Regression for #8093"
# First add a new inline
self.post_data['grommet_set-0-name'] = "Grommet 1"
collector_url = '/test_admin/admin/admin_views/collector/%d/' % self.collector.pk
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# Check that the PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="grommet_set-0-code"')
# Now resave that inline
self.post_data['grommet_set-INITIAL_FORMS'] = "1"
self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
self.post_data['grommet_set-0-name'] = "Grommet 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# Now modify that inline
self.post_data['grommet_set-INITIAL_FORMS'] = "1"
self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
self.post_data['grommet_set-0-name'] = "Grommet 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated")
def test_char_pk_inline(self):
"A model with a character PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data['doohickey_set-0-code'] = "DH1"
self.post_data['doohickey_set-0-name'] = "Doohickey 1"
collector_url = '/test_admin/admin/admin_views/collector/%d/' % self.collector.pk
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# Check that the PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="doohickey_set-0-code"')
# Now resave that inline
self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
self.post_data['doohickey_set-0-code'] = "DH1"
self.post_data['doohickey_set-0-name'] = "Doohickey 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# Now modify that inline
self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
self.post_data['doohickey_set-0-code'] = "DH1"
self.post_data['doohickey_set-0-name'] = "Doohickey 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated")
def test_integer_pk_inline(self):
"A model with an integer PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data['whatsit_set-0-index'] = "42"
self.post_data['whatsit_set-0-name'] = "Whatsit 1"
response = self.client.post('/test_admin/admin/admin_views/collector/1/', self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# Check that the PK link exists on the rendered form
response = self.client.get('/test_admin/admin/admin_views/collector/1/')
self.assertContains(response, 'name="whatsit_set-0-index"')
# Now resave that inline
self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
self.post_data['whatsit_set-0-index'] = "42"
self.post_data['whatsit_set-0-name'] = "Whatsit 1"
response = self.client.post('/test_admin/admin/admin_views/collector/1/', self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# Now modify that inline
self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
self.post_data['whatsit_set-0-index'] = "42"
self.post_data['whatsit_set-0-name'] = "Whatsit 1 Updated"
response = self.client.post('/test_admin/admin/admin_views/collector/1/', self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated")
def test_inherited_inline(self):
"An inherited model can be saved as inlines. Regression for #11042"
# First add a new inline
self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
collector_url = '/test_admin/admin/admin_views/collector/%d/' % self.collector.pk
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
doodad_pk = FancyDoodad.objects.all()[0].pk
# Check that the PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"')
# Now resave that inline
self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
# Now modify that inline
self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated")
def test_ordered_inline(self):
"""Check that an inline with an editable ordering fields is
updated correctly. Regression for #10922"""
# Create some objects with an initial ordering
Category.objects.create(id=1, order=1, collector=self.collector)
Category.objects.create(id=2, order=2, collector=self.collector)
Category.objects.create(id=3, order=0, collector=self.collector)
Category.objects.create(id=4, order=0, collector=self.collector)
# NB: The order values must be changed so that the items are reordered.
self.post_data.update({
"name": "Frederick Clegg",
"category_set-TOTAL_FORMS": "7",
"category_set-INITIAL_FORMS": "4",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "14",
"category_set-0-id": "1",
"category_set-0-collector": "1",
"category_set-1-order": "13",
"category_set-1-id": "2",
"category_set-1-collector": "1",
"category_set-2-order": "1",
"category_set-2-id": "3",
"category_set-2-collector": "1",
"category_set-3-order": "0",
"category_set-3-id": "4",
"category_set-3-collector": "1",
"category_set-4-order": "",
"category_set-4-id": "",
"category_set-4-collector": "1",
"category_set-5-order": "",
"category_set-5-id": "",
"category_set-5-collector": "1",
"category_set-6-order": "",
"category_set-6-id": "",
"category_set-6-collector": "1",
})
response = self.client.post('/test_admin/admin/admin_views/collector/1/', self.post_data)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# Check that the order values have been applied to the right objects
self.assertEqual(self.collector.category_set.count(), 4)
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
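
# The reordering test above only needs the ordering column to be an editable
# inline field; a plausible inline declaration (hypothetical, the real one
# lives in the admin_views test app):
from django.contrib import admin


class CategoryInlineSketch(admin.TabularInline):
    model = Category    # Category.order is a plain editable integer field
    extra = 3
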
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class NeverCacheTests(TestCase):
fixtures = ['admin-views-users.xml', 'admin-views-colors.xml', 'admin-views-fabrics.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_admin_index(self):
"Check the never-cache status of the main index"
response = self.client.get('/test_admin/admin/')
self.assertEqual(get_max_age(response), 0)
def test_app_index(self):
"Check the never-cache status of an application index"
response = self.client.get('/test_admin/admin/admin_views/')
self.assertEqual(get_max_age(response), 0)
def test_model_index(self):
"Check the never-cache status of a model index"
response = self.client.get('/test_admin/admin/admin_views/fabric/')
self.assertEqual(get_max_age(response), 0)
def test_model_add(self):
"Check the never-cache status of a model add page"
response = self.client.get('/test_admin/admin/admin_views/fabric/add/')
self.assertEqual(get_max_age(response), 0)
def test_model_view(self):
"Check the never-cache status of a model edit page"
response = self.client.get('/test_admin/admin/admin_views/section/1/')
self.assertEqual(get_max_age(response), 0)
def test_model_history(self):
"Check the never-cache status of a model history page"
response = self.client.get('/test_admin/admin/admin_views/section/1/history/')
self.assertEqual(get_max_age(response), 0)
def test_model_delete(self):
"Check the never-cache status of a model delete page"
response = self.client.get('/test_admin/admin/admin_views/section/1/delete/')
self.assertEqual(get_max_age(response), 0)
def test_login(self):
"Check the never-cache status of login views"
self.client.logout()
response = self.client.get('/test_admin/admin/')
self.assertEqual(get_max_age(response), 0)
def test_logout(self):
"Check the never-cache status of logout view"
response = self.client.get('/test_admin/admin/logout/')
self.assertEqual(get_max_age(response), 0)
def test_password_change(self):
"Check the never-cache status of the password change view"
self.client.logout()
response = self.client.get('/test_admin/password_change/')
self.assertEqual(get_max_age(response), None)
def test_password_change_done(self):
"Check the never-cache status of the password change done view"
response = self.client.get('/test_admin/admin/password_change/done/')
self.assertEqual(get_max_age(response), None)
def test_JS_i18n(self):
"Check the never-cache status of the JavaScript i18n view"
response = self.client.get('/test_admin/admin/jsi18n/')
self.assertEqual(get_max_age(response), None)
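
# NeverCacheTests relies on django.utils.cache.get_max_age(), which parses
# the Cache-Control header: admin views are wrapped in never_cache and so
# carry max-age=0, while views left undecorated here (password change,
# jsi18n) have no Cache-Control header and yield None. A minimal
# illustration:
from django.http import HttpResponse
from django.views.decorators.cache import never_cache


@never_cache
def _never_cached_view_sketch(request):
    # get_max_age() on this view's response returns 0.
    return HttpResponse('ok')
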
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class PrePopulatedTest(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_prepopulated_on(self):
response = self.client.get('/test_admin/admin/admin_views/prepopulatedpost/add/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, "id: '#id_slug',")
self.assertContains(response, "field['dependency_ids'].push('#id_title');")
self.assertContains(response, "id: '#id_prepopulatedsubpost_set-0-subslug',")
def test_prepopulated_off(self):
response = self.client.get('/test_admin/admin/admin_views/prepopulatedpost/1/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, "A Long Title")
self.assertNotContains(response, "id: '#id_slug'")
self.assertNotContains(response, "field['dependency_ids'].push('#id_title');")
self.assertNotContains(response, "id: '#id_prepopulatedsubpost_set-0-subslug',")
@override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
def test_prepopulated_maxlength_localized(self):
"""
Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure
that maxLength (in the JavaScript) is rendered without separators.
"""
response = self.client.get('/test_admin/admin/admin_views/prepopulatedpostlargeslug/add/')
self.assertContains(response, "maxLength: 1000") # instead of 1,000
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class SeleniumAdminViewsFirefoxTests(AdminSeleniumWebDriverTestCase):
available_apps = ['admin_views'] + AdminSeleniumWebDriverTestCase.available_apps
fixtures = ['admin-views-users.xml']
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def test_prepopulated_fields(self):
"""
Ensure that the JavaScript-automated prepopulated fields work with the
main form and with stacked and tabular inlines.
Refs #13068, #9264, #9983, #9784.
"""
self.admin_login(username='super', password='secret', login_url='/test_admin/admin/')
self.selenium.get('%s%s' % (self.live_server_url,
'/test_admin/admin/admin_views/mainprepopulated/add/'))
# Main form ----------------------------------------------------------
self.selenium.find_element_by_css_selector('#id_pubdate').send_keys('2012-02-18')
self.get_select_option('#id_status', 'option two').click()
self.selenium.find_element_by_css_selector('#id_name').send_keys(' this is the mAin nÀMë and it\'s awεšome')
slug1 = self.selenium.find_element_by_css_selector('#id_slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_css_selector('#id_slug2').get_attribute('value')
self.assertEqual(slug1, 'main-name-and-its-awesome-2012-02-18')
self.assertEqual(slug2, 'option-two-main-name-and-its-awesome')
# Stacked inlines ----------------------------------------------------
# Initial inline
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-pubdate').send_keys('2011-12-17')
self.get_select_option('#id_relatedprepopulated_set-0-status', 'option one').click()
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-name').send_keys(' here is a sŤāÇkeð inline ! ')
slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-slug2').get_attribute('value')
self.assertEqual(slug1, 'here-stacked-inline-2011-12-17')
self.assertEqual(slug2, 'option-one-here-stacked-inline')
# Add an inline
self.selenium.find_elements_by_link_text('Add another Related prepopulated')[0].click()
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-pubdate').send_keys('1999-01-25')
self.get_select_option('#id_relatedprepopulated_set-1-status', 'option two').click()
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-name').send_keys(' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog text... ')
slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-slug2').get_attribute('value')
self.assertEqual(slug1, 'now-you-have-another-stacked-inline-very-loooooooo') # 50 characters maximum for slug1 field
self.assertEqual(slug2, 'option-two-now-you-have-another-stacked-inline-very-looooooo') # 60 characters maximum for slug2 field
# Tabular inlines ----------------------------------------------------
# Initial inline
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-pubdate').send_keys('1234-12-07')
self.get_select_option('#id_relatedprepopulated_set-2-0-status', 'option two').click()
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-name').send_keys('And now, with a tÃbűlaŘ inline !!!')
slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-slug2').get_attribute('value')
self.assertEqual(slug1, 'and-now-tabular-inline-1234-12-07')
self.assertEqual(slug2, 'option-two-and-now-tabular-inline')
# Add an inline
self.selenium.find_elements_by_link_text('Add another Related prepopulated')[1].click()
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-pubdate').send_keys('1981-08-22')
self.get_select_option('#id_relatedprepopulated_set-2-1-status', 'option one').click()
self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-name').send_keys('a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters')
slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-slug2').get_attribute('value')
self.assertEqual(slug1, 'tabular-inline-ignored-characters-1981-08-22')
self.assertEqual(slug2, 'option-one-tabular-inline-ignored-characters')
# Save and check that everything is properly stored in the database
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
self.assertEqual(MainPrepopulated.objects.all().count(), 1)
MainPrepopulated.objects.get(
name=' this is the mAin nÀMë and it\'s awεšome',
pubdate='2012-02-18',
status='option two',
slug1='main-name-and-its-awesome-2012-02-18',
slug2='option-two-main-name-and-its-awesome',
)
self.assertEqual(RelatedPrepopulated.objects.all().count(), 4)
RelatedPrepopulated.objects.get(
name=' here is a sŤāÇkeð inline ! ',
pubdate='2011-12-17',
status='option one',
slug1='here-stacked-inline-2011-12-17',
slug2='option-one-here-stacked-inline',
)
RelatedPrepopulated.objects.get(
name=' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooo', # 75 characters in name field
pubdate='1999-01-25',
status='option two',
slug1='now-you-have-another-stacked-inline-very-loooooooo',
slug2='option-two-now-you-have-another-stacked-inline-very-looooooo',
)
RelatedPrepopulated.objects.get(
name='And now, with a tÃbűlaŘ inline !!!',
pubdate='1234-12-07',
status='option two',
slug1='and-now-tabular-inline-1234-12-07',
slug2='option-two-and-now-tabular-inline',
)
RelatedPrepopulated.objects.get(
name='a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters',
pubdate='1981-08-22',
status='option one',
slug1='tabular-inline-ignored-characters-1981-08-22',
slug2='option-one-tabular-inline-ignored-characters',
)
def test_populate_existing_object(self):
"""
Ensure that the prepopulation works for existing objects too, as long
as the original field is empty.
Refs #19082.
"""
# Slugs are empty to start with.
item = MainPrepopulated.objects.create(
name=' this is the mAin nÀMë',
pubdate='2012-02-18',
status='option two',
slug1='',
slug2='',
)
self.admin_login(username='super',
password='secret',
login_url='/test_admin/admin/')
object_url = '%s%s' % (
self.live_server_url,
'/test_admin/admin/admin_views/mainprepopulated/{}/'.format(item.id))
self.selenium.get(object_url)
self.selenium.find_element_by_css_selector('#id_name').send_keys(' the best')
# The slugs got prepopulated since they were originally empty
slug1 = self.selenium.find_element_by_css_selector('#id_slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_css_selector('#id_slug2').get_attribute('value')
self.assertEqual(slug1, 'main-name-best-2012-02-18')
self.assertEqual(slug2, 'option-two-main-name-best')
# Save the object
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
self.selenium.get(object_url)
self.selenium.find_element_by_css_selector('#id_name').send_keys(' hello')
        # The slugs were not prepopulated again because they were no longer empty
slug1 = self.selenium.find_element_by_css_selector('#id_slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_css_selector('#id_slug2').get_attribute('value')
self.assertEqual(slug1, 'main-name-best-2012-02-18')
self.assertEqual(slug2, 'option-two-main-name-best')
def test_collapsible_fieldset(self):
"""
        Test that the 'collapse' class in a fieldsets definition allows
        showing and hiding the appropriate field section.
"""
self.admin_login(username='super', password='secret', login_url='/test_admin/admin/')
self.selenium.get('%s%s' % (self.live_server_url,
'/test_admin/admin/admin_views/article/add/'))
self.assertFalse(self.selenium.find_element_by_id('id_title').is_displayed())
self.selenium.find_elements_by_link_text('Show')[0].click()
self.assertTrue(self.selenium.find_element_by_id('id_title').is_displayed())
self.assertEqual(
self.selenium.find_element_by_id('fieldsetcollapser0').text,
"Hide"
)
def test_first_field_focus(self):
"""JavaScript-assisted auto-focus on first usable form field."""
# First form field has a single widget
self.admin_login(username='super', password='secret', login_url='/test_admin/admin/')
self.selenium.get('%s%s' % (self.live_server_url,
'/test_admin/admin/admin_views/picture/add/'))
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element_by_id('id_name')
)
# First form field has a MultiWidget
self.selenium.get('%s%s' % (self.live_server_url,
'/test_admin/admin/admin_views/reservation/add/'))
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element_by_id('id_start_date_0')
)
def test_cancel_delete_confirmation(self):
"Cancelling the deletion of an object takes the user back one page."
pizza = Pizza.objects.create(name="Double Cheese")
url = reverse('admin:admin_views_pizza_change', args=(pizza.id,))
full_url = '%s%s' % (self.live_server_url, url)
self.admin_login(username='super', password='secret', login_url='/test_admin/admin/')
self.selenium.get(full_url)
self.selenium.find_element_by_class_name('deletelink').click()
self.selenium.find_element_by_class_name('cancel-link').click()
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
def test_cancel_delete_related_confirmation(self):
"""
Cancelling the deletion of an object with relations takes the user back
one page.
"""
pizza = Pizza.objects.create(name="Double Cheese")
topping1 = Topping.objects.create(name="Cheddar")
topping2 = Topping.objects.create(name="Mozzarella")
pizza.toppings.add(topping1, topping2)
url = reverse('admin:admin_views_pizza_change', args=(pizza.id,))
full_url = '%s%s' % (self.live_server_url, url)
self.admin_login(username='super', password='secret', login_url='/test_admin/admin/')
self.selenium.get(full_url)
self.selenium.find_element_by_class_name('deletelink').click()
self.selenium.find_element_by_class_name('cancel-link').click()
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
self.assertEqual(Topping.objects.count(), 2)
class SeleniumAdminViewsChromeTests(SeleniumAdminViewsFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class SeleniumAdminViewsIETests(SeleniumAdminViewsFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class ReadonlyTest(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_readonly_get(self):
response = self.client.get('/test_admin/admin/admin_views/post/add/')
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'name="posted"')
        # 3 fields + 2 submit buttons + 5 inline management form fields + 2
        # hidden fields for inlines + 1 field for the inline + 2 empty form
        # fields = 15 inputs in total
self.assertContains(response, "<input", count=15)
self.assertContains(response, formats.localize(datetime.date.today()))
self.assertContains(response,
"<label>Awesomeness level:</label>")
self.assertContains(response, "Very awesome.")
self.assertContains(response, "Unknown coolness.")
self.assertContains(response, "foo")
# Checks that multiline text in a readonly field gets <br /> tags
self.assertContains(response, "Multiline<br />test<br />string")
self.assertContains(response, "<p>Multiline<br />html<br />content</p>", html=True)
self.assertContains(response, "InlineMultiline<br />test<br />string")
self.assertContains(response,
formats.localize(datetime.date.today() - datetime.timedelta(days=7)))
self.assertContains(response, '<div class="form-row field-coolness">')
self.assertContains(response, '<div class="form-row field-awesomeness_level">')
self.assertContains(response, '<div class="form-row field-posted">')
self.assertContains(response, '<div class="form-row field-value">')
self.assertContains(response, '<div class="form-row">')
self.assertContains(response, '<p class="help">', 3)
self.assertContains(response, '<p class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</p>', html=True)
self.assertContains(response, '<p class="help">Some help text for the content (with unicode ŠĐĆŽćžšđ)</p>', html=True)
self.assertContains(response, '<p class="help">Some help text for the date (with unicode ŠĐĆŽćžšđ)</p>', html=True)
p = Post.objects.create(title="I worked on readonly_fields", content="Its good stuff")
response = self.client.get('/test_admin/admin/admin_views/post/%d/' % p.pk)
self.assertContains(response, "%d amount of cool" % p.pk)
def test_readonly_post(self):
data = {
"title": "Django Got Readonly Fields",
"content": "This is an incredible development.",
"link_set-TOTAL_FORMS": "1",
"link_set-INITIAL_FORMS": "0",
"link_set-MAX_NUM_FORMS": "0",
}
response = self.client.post('/test_admin/admin/admin_views/post/add/', data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 1)
p = Post.objects.get()
self.assertEqual(p.posted, datetime.date.today())
data["posted"] = "10-8-1990" # some date that's not today
response = self.client.post('/test_admin/admin/admin_views/post/add/', data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 2)
p = Post.objects.order_by('-id')[0]
self.assertEqual(p.posted, datetime.date.today())
def test_readonly_manytomany(self):
"Regression test for #13004"
response = self.client.get('/test_admin/admin/admin_views/pizza/add/')
self.assertEqual(response.status_code, 200)
def test_user_password_change_limited_queryset(self):
su = User.objects.filter(is_superuser=True)[0]
response = self.client.get('/test_admin/admin2/auth/user/%s/password/' % su.pk)
self.assertEqual(response.status_code, 404)
def test_change_form_renders_correct_null_choice_value(self):
"""
Regression test for #17911.
"""
choice = Choice.objects.create(choice=None)
response = self.client.get('/test_admin/admin/admin_views/choice/%s/' % choice.pk)
self.assertContains(response, '<p>No opinion</p>', html=True)
self.assertNotContains(response, '<p>(None)</p>')
def test_readonly_backwards_ref(self):
"""
Regression test for #16433 - backwards references for related objects
broke if the related field is read-only due to the help_text attribute
"""
topping = Topping.objects.create(name='Salami')
pizza = Pizza.objects.create(name='Americano')
pizza.toppings.add(topping)
response = self.client.get('/test_admin/admin/admin_views/topping/add/')
self.assertEqual(response.status_code, 200)
def test_readonly_field_overrides(self):
"""
Regression test for #22087 - ModelForm Meta overrides are ignored by
AdminReadonlyField
"""
p = FieldOverridePost.objects.create(title="Test Post", content="Test Content")
response = self.client.get('/test_admin/admin/admin_views/fieldoverridepost/%d/' % p.pk)
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<p class="help">Overridden help text for the date</p>')
self.assertContains(response, '<label for="id_public">Overridden public label:</label>', html=True)
self.assertNotContains(response, "Some help text for the date (with unicode ŠĐĆŽćžšđ)")
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class LimitChoicesToInAdminTest(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_limit_choices_to_as_callable(self):
"""Test for ticket 2445 changes to admin."""
threepwood = Character.objects.create(
username='threepwood',
last_action=datetime.datetime.today() + datetime.timedelta(days=1),
)
marley = Character.objects.create(
username='marley',
last_action=datetime.datetime.today() - datetime.timedelta(days=1),
)
response = self.client.get('/test_admin/admin/admin_views/stumpjoke/add/')
# The allowed option should appear twice; the limited option should not appear.
self.assertContains(response, threepwood.username, count=2)
self.assertNotContains(response, marley.username)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class RawIdFieldsTest(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_limit_choices_to(self):
"""Regression test for 14880"""
actor = Actor.objects.create(name="Palin", age=27)
Inquisition.objects.create(expected=True,
leader=actor,
country="England")
Inquisition.objects.create(expected=False,
leader=actor,
country="Spain")
response = self.client.get('/test_admin/admin/admin_views/sketch/add/')
# Find the link
m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"', response.content)
self.assertTrue(m) # Got a match
popup_url = m.groups()[0].decode().replace("&", "&")
# Handle relative links
popup_url = urljoin(response.request['PATH_INFO'], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step also tests integers, strings and booleans in the
# lookup query string; in model we define inquisition field to have a
# limit_choices_to option that includes a filter on a string field
# (inquisition__actor__name), a filter on an integer field
# (inquisition__actor__age), and a filter on a boolean field
# (inquisition__expected).
response2 = self.client.get(popup_url)
self.assertContains(response2, "Spain")
self.assertNotContains(response2, "England")
def test_limit_choices_to_isnull_false(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get('/test_admin/admin/admin_views/sketch/add/')
# Find the link
m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"', response.content)
self.assertTrue(m) # Got a match
popup_url = m.groups()[0].decode().replace("&", "&")
# Handle relative links
popup_url = urljoin(response.request['PATH_INFO'], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step tests field__isnull=0 gets parsed correctly from the
# lookup query string; in model we define defendant0 field to have a
# limit_choices_to option that includes "actor__title__isnull=False".
response2 = self.client.get(popup_url)
self.assertContains(response2, "Kilbraken")
self.assertNotContains(response2, "Palin")
def test_limit_choices_to_isnull_true(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get('/test_admin/admin/admin_views/sketch/add/')
# Find the link
m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"', response.content)
self.assertTrue(m) # Got a match
popup_url = m.groups()[0].decode().replace("&", "&")
# Handle relative links
popup_url = urljoin(response.request['PATH_INFO'], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step tests field__isnull=1 gets parsed correctly from the
# lookup query string; in model we define defendant1 field to have a
# limit_choices_to option that includes "actor__title__isnull=True".
response2 = self.client.get(popup_url)
self.assertNotContains(response2, "Kilbraken")
self.assertContains(response2, "Palin")
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class UserAdminTest(TestCase):
"""
Tests user CRUD functionality.
"""
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_save_button(self):
user_count = User.objects.count()
response = self.client.post('/test_admin/admin/auth/user/add/', {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'newpassword',
})
new_user = User.objects.order_by('-id')[0]
self.assertRedirects(response, '/test_admin/admin/auth/user/%s/' % new_user.pk)
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_save_continue_editing_button(self):
user_count = User.objects.count()
response = self.client.post('/test_admin/admin/auth/user/add/', {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'newpassword',
'_continue': '1',
})
new_user = User.objects.order_by('-id')[0]
self.assertRedirects(response, '/test_admin/admin/auth/user/%s/' % new_user.pk)
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_password_mismatch(self):
response = self.client.post('/test_admin/admin/auth/user/add/', {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'mismatch',
})
self.assertEqual(response.status_code, 200)
adminform = response.context['adminform']
self.assertNotIn('password', adminform.form.errors)
self.assertEqual(adminform.form.errors['password2'],
["The two password fields didn't match."])
def test_user_fk_popup(self):
"""Quick user addition in a FK popup shouldn't invoke view for further user customization"""
response = self.client.get('/test_admin/admin/admin_views/album/add/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, '/test_admin/admin/auth/user/add')
self.assertContains(response, 'class="add-another" id="add_id_owner"')
response = self.client.get('/test_admin/admin/auth/user/add/?_popup=1')
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'name="_continue"')
self.assertNotContains(response, 'name="_addanother"')
data = {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'newpassword',
'_popup': '1',
'_save': '1',
}
response = self.client.post('/test_admin/admin/auth/user/add/?_popup=1', data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'dismissAddAnotherPopup')
def test_save_add_another_button(self):
user_count = User.objects.count()
response = self.client.post('/test_admin/admin/auth/user/add/', {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'newpassword',
'_addanother': '1',
})
new_user = User.objects.order_by('-id')[0]
self.assertRedirects(response, '/test_admin/admin/auth/user/add/')
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_user_permission_performance(self):
u = User.objects.all()[0]
# Don't depend on a warm cache, see #17377.
ContentType.objects.clear_cache()
with self.assertNumQueries(10):
response = self.client.get('/test_admin/admin/auth/user/%s/' % u.pk)
self.assertEqual(response.status_code, 200)
def test_form_url_present_in_context(self):
u = User.objects.all()[0]
response = self.client.get('/test_admin/admin3/auth/user/%s/password/' % u.pk)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['form_url'], 'pony')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class GroupAdminTest(TestCase):
"""
Tests group CRUD functionality.
"""
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_save_button(self):
group_count = Group.objects.count()
response = self.client.post('/test_admin/admin/auth/group/add/', {
'name': 'newgroup',
})
Group.objects.order_by('-id')[0]
self.assertRedirects(response, '/test_admin/admin/auth/group/')
self.assertEqual(Group.objects.count(), group_count + 1)
def test_group_permission_performance(self):
g = Group.objects.create(name="test_group")
# Ensure no queries are skipped due to cached content type for Group.
ContentType.objects.clear_cache()
with self.assertNumQueries(8):
response = self.client.get('/test_admin/admin/auth/group/%s/' % g.pk)
self.assertEqual(response.status_code, 200)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class CSSTest(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_field_prefix_css_classes(self):
"""
Ensure that fields have a CSS class name with a 'field-' prefix.
Refs #16371.
"""
response = self.client.get('/test_admin/admin/admin_views/post/add/')
# The main form
self.assertContains(response, 'class="form-row field-title"')
self.assertContains(response, 'class="form-row field-content"')
self.assertContains(response, 'class="form-row field-public"')
self.assertContains(response, 'class="form-row field-awesomeness_level"')
self.assertContains(response, 'class="form-row field-coolness"')
self.assertContains(response, 'class="form-row field-value"')
self.assertContains(response, 'class="form-row"') # The lambda function
# The tabular inline
self.assertContains(response, '<td class="field-url">')
self.assertContains(response, '<td class="field-posted">')
def test_index_css_classes(self):
"""
Ensure that CSS class names are used for each app and model on the
admin index pages.
Refs #17050.
"""
# General index page
response = self.client.get("/test_admin/admin/")
self.assertContains(response, '<div class="app-admin_views module">')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
# App index page
response = self.client.get("/test_admin/admin/admin_views/")
self.assertContains(response, '<div class="app-admin_views module">')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
def test_app_model_in_form_body_class(self):
"""
Ensure app and model tag are correctly read by change_form template
"""
response = self.client.get('/test_admin/admin/admin_views/section/add/')
self.assertEqual(response.status_code, 200)
self.assertContains(response,
'<body class=" app-admin_views model-section ')
def test_app_model_in_list_body_class(self):
"""
Ensure app and model tag are correctly read by change_list template
"""
response = self.client.get('/test_admin/admin/admin_views/section/')
self.assertEqual(response.status_code, 200)
self.assertContains(response,
'<body class=" app-admin_views model-section ')
def test_app_model_in_delete_confirmation_body_class(self):
"""
Ensure app and model tag are correctly read by delete_confirmation
template
"""
response = self.client.get(
'/test_admin/admin/admin_views/section/1/delete/')
self.assertEqual(response.status_code, 200)
self.assertContains(response,
'<body class=" app-admin_views model-section ')
def test_app_model_in_app_index_body_class(self):
"""
Ensure app and model tag are correctly read by app_index template
"""
response = self.client.get('/test_admin/admin/admin_views/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<body class=" dashboard app-admin_views')
def test_app_model_in_delete_selected_confirmation_body_class(self):
"""
Ensure app and model tag are correctly read by
delete_selected_confirmation template
"""
action_data = {
ACTION_CHECKBOX_NAME: [1],
'action': 'delete_selected',
'index': 0,
}
response = self.client.post('/test_admin/admin/admin_views/section/',
action_data)
self.assertEqual(response.status_code, 200)
self.assertContains(response,
'<body class=" app-admin_views model-section ')
def test_changelist_field_classes(self):
"""
Cells of the change list table should contain the field name in their class attribute
Refs #11195.
"""
Podcast.objects.create(name="Django Dose",
release_date=datetime.date.today())
response = self.client.get('/test_admin/admin/admin_views/podcast/')
self.assertContains(
response, '<th class="field-name">')
self.assertContains(
response, '<td class="field-release_date nowrap">')
self.assertContains(
response, '<td class="action-checkbox">')
try:
import docutils
except ImportError:
docutils = None
@unittest.skipUnless(docutils, "no docutils installed.")
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
@modify_settings(INSTALLED_APPS={'append': ['django.contrib.admindocs', 'django.contrib.flatpages']})
class AdminDocsTest(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_tags(self):
response = self.client.get('/test_admin/admin/doc/tags/')
# The builtin tag group exists
self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True)
# A builtin tag exists in both the index and detail
self.assertContains(response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True)
self.assertContains(response, '<li><a href="#built_in-autoescape">autoescape</a></li>', html=True)
# An app tag exists in both the index and detail
self.assertContains(response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True)
self.assertContains(response, '<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>', html=True)
# The admin list tag group exists
self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True)
# An admin list tag exists in both the index and detail
self.assertContains(response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True)
self.assertContains(response, '<li><a href="#admin_list-admin_actions">admin_actions</a></li>', html=True)
def test_filters(self):
response = self.client.get('/test_admin/admin/doc/filters/')
# The builtin filter group exists
self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True)
# A builtin filter exists in both the index and detail
self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True)
self.assertContains(response, '<li><a href="#built_in-add">add</a></li>', html=True)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class ValidXHTMLTests(TestCase):
fixtures = ['admin-views-users.xml']
urlbit = 'admin'
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
@override_settings(
TEMPLATE_CONTEXT_PROCESSORS=filter(
lambda t: t != 'django.core.context_processors.i18n',
global_settings.TEMPLATE_CONTEXT_PROCESSORS),
USE_I18N=False,
)
def test_lang_name_present(self):
response = self.client.get('/test_admin/%s/admin_views/' % self.urlbit)
self.assertNotContains(response, ' lang=""')
self.assertNotContains(response, ' xml:lang=""')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls",
USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
class DateHierarchyTests(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
formats.reset_format_cache()
def assert_non_localized_year(self, response, year):
"""Ensure that the year is not localized with
USE_THOUSAND_SEPARATOR. Refs #15234.
"""
self.assertNotContains(response, formats.number_format(year))
def assert_contains_year_link(self, response, date):
self.assertContains(response, '?release_date__year=%d"' % (date.year,))
def assert_contains_month_link(self, response, date):
self.assertContains(
response, '?release_date__month=%d&release_date__year=%d"' % (
date.month, date.year))
def assert_contains_day_link(self, response, date):
self.assertContains(
response, '?release_date__day=%d&'
'release_date__month=%d&release_date__year=%d"' % (
date.day, date.month, date.year))
def test_empty(self):
"""
Ensure that no date hierarchy links display with empty changelist.
"""
response = self.client.get(
reverse('admin:admin_views_podcast_changelist'))
self.assertNotContains(response, 'release_date__year=')
self.assertNotContains(response, 'release_date__month=')
self.assertNotContains(response, 'release_date__day=')
def test_single(self):
"""
Ensure that single day-level date hierarchy appears for single object.
"""
DATE = datetime.date(2000, 6, 30)
Podcast.objects.create(release_date=DATE)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
self.assert_contains_day_link(response, DATE)
self.assert_non_localized_year(response, 2000)
def test_within_month(self):
"""
Ensure that day-level links appear for changelist within single month.
"""
DATES = (datetime.date(2000, 6, 30),
datetime.date(2000, 6, 15),
datetime.date(2000, 6, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
for date in DATES:
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_within_year(self):
"""
Ensure that month-level links appear for changelist within single year.
"""
DATES = (datetime.date(2000, 1, 30),
datetime.date(2000, 3, 15),
datetime.date(2000, 5, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
# no day-level links
self.assertNotContains(response, 'release_date__day=')
for date in DATES:
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_multiple_years(self):
"""
Ensure that year-level links appear for year-spanning changelist.
"""
DATES = (datetime.date(2001, 1, 30),
datetime.date(2003, 3, 15),
datetime.date(2005, 5, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
response = self.client.get(
reverse('admin:admin_views_podcast_changelist'))
# no day/month-level links
self.assertNotContains(response, 'release_date__day=')
self.assertNotContains(response, 'release_date__month=')
for date in DATES:
self.assert_contains_year_link(response, date)
# and make sure GET parameters still behave correctly
for date in DATES:
url = '%s?release_date__year=%d' % (
reverse('admin:admin_views_podcast_changelist'),
date.year)
response = self.client.get(url)
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
url = '%s?release_date__year=%d&release_date__month=%d' % (
reverse('admin:admin_views_podcast_changelist'),
date.year, date.month)
response = self.client.get(url)
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminCustomSaveRelatedTests(TestCase):
"""
Ensure that one can easily customize the way related objects are saved.
Refs #16115.
"""
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def test_should_be_able_to_edit_related_objects_on_add_view(self):
post = {
'child_set-TOTAL_FORMS': '3',
'child_set-INITIAL_FORMS': '0',
'name': 'Josh Stone',
'child_set-0-name': 'Paul',
'child_set-1-name': 'Catherine',
}
self.client.post('/test_admin/admin/admin_views/parent/add/', post)
self.assertEqual(1, Parent.objects.count())
self.assertEqual(2, Child.objects.count())
children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
def test_should_be_able_to_edit_related_objects_on_change_view(self):
parent = Parent.objects.create(name='Josh Stone')
paul = Child.objects.create(parent=parent, name='Paul')
catherine = Child.objects.create(parent=parent, name='Catherine')
post = {
'child_set-TOTAL_FORMS': '5',
'child_set-INITIAL_FORMS': '2',
'name': 'Josh Stone',
'child_set-0-name': 'Paul',
'child_set-0-id': paul.id,
'child_set-1-name': 'Catherine',
'child_set-1-id': catherine.id,
}
self.client.post('/test_admin/admin/admin_views/parent/%s/' % parent.id, post)
children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
def test_should_be_able_to_edit_related_objects_on_changelist_view(self):
parent = Parent.objects.create(name='Josh Rock')
Child.objects.create(parent=parent, name='Paul')
Child.objects.create(parent=parent, name='Catherine')
post = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '1',
'form-MAX_NUM_FORMS': '0',
'form-0-id': parent.id,
'form-0-name': 'Josh Stone',
'_save': 'Save'
}
self.client.post('/test_admin/admin/admin_views/parent/', post)
children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminViewLogoutTest(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_client_logout_url_can_be_used_to_login(self):
response = self.client.get('/test_admin/admin/logout/')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'registration/logged_out.html')
self.assertEqual(response.request['PATH_INFO'], '/test_admin/admin/logout/')
# we are now logged out
response = self.client.get('/test_admin/admin/logout/')
self.assertEqual(response.status_code, 302) # we should be redirected to the login page.
# follow the redirect and test results.
response = self.client.get('/test_admin/admin/logout/', follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'admin/login.html')
self.assertEqual(response.request['PATH_INFO'], '/test_admin/admin/login/')
self.assertContains(response, '<input type="hidden" name="next" value="/test_admin/admin/" />')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminUserMessageTest(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def send_message(self, level):
"""
Helper that sends a post to the dummy test methods and asserts that a
message with the level has appeared in the response.
"""
action_data = {
ACTION_CHECKBOX_NAME: [1],
'action': 'message_%s' % level,
'index': 0,
}
response = self.client.post('/test_admin/admin/admin_views/usermessenger/',
action_data, follow=True)
self.assertContains(response,
'<li class="%s">Test %s</li>' % (level, level),
html=True)
@override_settings(MESSAGE_LEVEL=10) # Set to DEBUG for this request
def test_message_debug(self):
self.send_message('debug')
def test_message_info(self):
self.send_message('info')
def test_message_success(self):
self.send_message('success')
def test_message_warning(self):
self.send_message('warning')
def test_message_error(self):
self.send_message('error')
def test_message_extra_tags(self):
action_data = {
ACTION_CHECKBOX_NAME: [1],
'action': 'message_extra_tags',
'index': 0,
}
response = self.client.post('/test_admin/admin/admin_views/usermessenger/',
action_data, follow=True)
self.assertContains(response,
'<li class="extra_tag info">Test tags</li>',
html=True)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminKeepChangeListFiltersTests(TestCase):
fixtures = ['admin-views-users.xml']
admin_site = site
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def assertURLEqual(self, url1, url2):
"""
Assert that two URLs are equal despite the ordering
of their querystring. Refs #22360.
"""
parsed_url1 = urlparse(url1)
path1 = parsed_url1.path
parsed_qs1 = dict(parse_qsl(parsed_url1.query))
parsed_url2 = urlparse(url2)
path2 = parsed_url2.path
parsed_qs2 = dict(parse_qsl(parsed_url2.query))
for parsed_qs in [parsed_qs1, parsed_qs2]:
if '_changelist_filters' in parsed_qs:
changelist_filters = parsed_qs['_changelist_filters']
parsed_filters = dict(parse_qsl(changelist_filters))
parsed_qs['_changelist_filters'] = parsed_filters
self.assertEqual(path1, path2)
self.assertEqual(parsed_qs1, parsed_qs2)
def test_assert_url_equal(self):
# Test equality.
self.assertURLEqual(
'http://testserver/test_admin/admin/auth/user/105/?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0',
'http://testserver/test_admin/admin/auth/user/105/?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'
)
# Test inequality.
with self.assertRaises(AssertionError):
self.assertURLEqual(
'http://testserver/test_admin/admin/auth/user/105/?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0',
'http://testserver/test_admin/admin/auth/user/105/?_changelist_filters=is_staff__exact%3D1%26is_superuser__exact%3D1'
)
# Ignore scheme and host.
self.assertURLEqual(
'http://testserver/test_admin/admin/auth/user/105/?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0',
'/test_admin/admin/auth/user/105/?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'
)
# Ignore ordering of querystring.
self.assertURLEqual(
'/test_admin/admin/auth/user/?is_staff__exact=0&is_superuser__exact=0',
'/test_admin/admin/auth/user/?is_superuser__exact=0&is_staff__exact=0'
)
# Ignore ordering of _changelist_filters.
self.assertURLEqual(
'/test_admin/admin/auth/user/105/?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0',
'/test_admin/admin/auth/user/105/?_changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0'
)
def get_changelist_filters(self):
return {
'is_superuser__exact': 0,
'is_staff__exact': 0,
}
def get_changelist_filters_querystring(self):
return urlencode(self.get_changelist_filters())
def get_preserved_filters_querystring(self):
return urlencode({
'_changelist_filters': self.get_changelist_filters_querystring()
})
def get_sample_user_id(self):
return 104
def get_changelist_url(self):
return '%s?%s' % (
reverse('admin:auth_user_changelist',
current_app=self.admin_site.name),
self.get_changelist_filters_querystring(),
)
def get_add_url(self):
return '%s?%s' % (
reverse('admin:auth_user_add',
current_app=self.admin_site.name),
self.get_preserved_filters_querystring(),
)
def get_change_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse('admin:auth_user_change', args=(user_id,),
current_app=self.admin_site.name),
self.get_preserved_filters_querystring(),
)
def get_history_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse('admin:auth_user_history', args=(user_id,),
current_app=self.admin_site.name),
self.get_preserved_filters_querystring(),
)
def get_delete_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse('admin:auth_user_delete', args=(user_id,),
current_app=self.admin_site.name),
self.get_preserved_filters_querystring(),
)
def test_changelist_view(self):
response = self.client.get(self.get_changelist_url())
self.assertEqual(response.status_code, 200)
# Check the `change_view` link has the correct querystring.
detail_link = re.search(
'<a href="(.*?)">joepublic</a>',
force_text(response.content)
)
self.assertURLEqual(detail_link.group(1), self.get_change_url())
def test_change_view(self):
# Get the `change_view`.
response = self.client.get(self.get_change_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
force_text(response.content)
)
self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
# Check the history link.
history_link = re.search(
'<a href="(.*?)" class="historylink">History</a>',
force_text(response.content)
)
self.assertURLEqual(history_link.group(1), self.get_history_url())
# Check the delete link.
delete_link = re.search(
'<a href="(.*?)" class="deletelink">Delete</a>',
force_text(response.content)
)
self.assertURLEqual(delete_link.group(1), self.get_delete_url())
# Test redirect on "Save".
post_data = {
'username': 'joepublic',
'last_login_0': '2007-05-30',
'last_login_1': '13:20:10',
'date_joined_0': '2007-05-30',
'date_joined_1': '13:20:10',
}
post_data['_save'] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertEqual(response.status_code, 302)
self.assertURLEqual(
response.url,
self.get_changelist_url()
)
post_data.pop('_save')
# Test redirect on "Save and continue".
post_data['_continue'] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertEqual(response.status_code, 302)
self.assertURLEqual(
response.url,
self.get_change_url()
)
post_data.pop('_continue')
# Test redirect on "Save and add new".
post_data['_addanother'] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertEqual(response.status_code, 302)
self.assertURLEqual(
response.url,
self.get_add_url()
)
post_data.pop('_addanother')
def test_add_view(self):
# Get the `add_view`.
response = self.client.get(self.get_add_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
force_text(response.content)
)
self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
post_data = {
'username': 'dummy',
'password1': 'test',
'password2': 'test',
}
# Test redirect on "Save".
post_data['_save'] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertEqual(response.status_code, 302)
self.assertURLEqual(
response.url,
self.get_change_url(User.objects.latest('pk').pk)
)
post_data.pop('_save')
# Test redirect on "Save and continue".
post_data['username'] = 'dummy2'
post_data['_continue'] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertEqual(response.status_code, 302)
self.assertURLEqual(
response.url,
self.get_change_url(User.objects.latest('pk').pk)
)
post_data.pop('_continue')
# Test redirect on "Save and add new".
post_data['username'] = 'dummy3'
post_data['_addanother'] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertEqual(response.status_code, 302)
self.assertURLEqual(
response.url,
self.get_add_url()
)
post_data.pop('_addanother')
def test_delete_view(self):
# Test redirect on "Delete".
response = self.client.post(self.get_delete_url(), {'post': 'yes'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(
response.url,
self.get_changelist_url()
)
def test_url_prefix(self):
context = {
'preserved_filters': self.get_preserved_filters_querystring(),
'opts': User._meta,
}
url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
self.assertURLEqual(
self.get_changelist_url(),
add_preserved_filters(context, url),
)
original_prefix = get_script_prefix()
try:
set_script_prefix('/prefix/')
url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
self.assertURLEqual(
self.get_changelist_url(),
add_preserved_filters(context, url),
)
finally:
set_script_prefix(original_prefix)
class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests):
admin_site = site2
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class TestLabelVisibility(TestCase):
""" #11277 -Labels of hidden fields in admin were not hidden. """
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def test_all_fields_visible(self):
response = self.client.get('/test_admin/admin/admin_views/emptymodelvisible/add/')
self.assert_fieldline_visible(response)
self.assert_field_visible(response, 'first')
self.assert_field_visible(response, 'second')
def test_all_fields_hidden(self):
response = self.client.get('/test_admin/admin/admin_views/emptymodelhidden/add/')
self.assert_fieldline_hidden(response)
self.assert_field_hidden(response, 'first')
self.assert_field_hidden(response, 'second')
def test_mixin(self):
response = self.client.get('/test_admin/admin/admin_views/emptymodelmixin/add/')
self.assert_fieldline_visible(response)
self.assert_field_hidden(response, 'first')
self.assert_field_visible(response, 'second')
def assert_field_visible(self, response, field_name):
self.assertContains(response, '<div class="field-box field-%s">' % field_name)
def assert_field_hidden(self, response, field_name):
self.assertContains(response, '<div class="field-box field-%s hidden">' % field_name)
def assert_fieldline_visible(self, response):
self.assertContains(response, '<div class="form-row field-first field-second">')
def assert_fieldline_hidden(self, response):
self.assertContains(response, '<div class="form-row hidden')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class AdminViewOnSiteTests(TestCase):
fixtures = ['admin-views-users.xml', 'admin-views-restaurants.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_add_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
run validation even if validation is contingent on parent form data
"""
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {"family_name": "Test1",
"dependentchild_set-TOTAL_FORMS": "1",
"dependentchild_set-INITIAL_FORMS": "0",
"dependentchild_set-MAX_NUM_FORMS": "1",
"dependentchild_set-0-id": "",
"dependentchild_set-0-parent": "",
"dependentchild_set-0-family_name": "Test2"}
response = self.client.post('/test_admin/admin/admin_views/parentwithdependentchildren/add/',
post_data)
# just verifying the parent form failed validation, as expected --
# this isn't the regression test
self.assertIn('some_required_info', response.context['adminform'].form.errors)
# actual regression test
for error_set in response.context['inline_admin_formset'].formset.errors:
self.assertEqual(['Children must share a family name with their parents in this contrived test case'],
error_set.get('__all__'))
def test_change_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
run validation even if validation is contingent on parent form data
"""
pwdc = ParentWithDependentChildren.objects.create(some_required_info=6,
family_name="Test1")
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {"family_name": "Test2",
"dependentchild_set-TOTAL_FORMS": "1",
"dependentchild_set-INITIAL_FORMS": "0",
"dependentchild_set-MAX_NUM_FORMS": "1",
"dependentchild_set-0-id": "",
"dependentchild_set-0-parent": str(pwdc.id),
"dependentchild_set-0-family_name": "Test1"}
response = self.client.post('/test_admin/admin/admin_views/parentwithdependentchildren/%d/'
% pwdc.id, post_data)
# just verifying the parent form failed validation, as expected --
# this isn't the regression test
self.assertIn('some_required_info', response.context['adminform'].form.errors)
# actual regression test
for error_set in response.context['inline_admin_formset'].formset.errors:
self.assertEqual(['Children must share a family name with their parents in this contrived test case'],
error_set.get('__all__'))
def test_check(self):
"Ensure that the view_on_site value is either a boolean or a callable"
try:
CityAdmin.view_on_site = True
self.assertEqual(CityAdmin.check(City), [])
CityAdmin.view_on_site = False
self.assertEqual(CityAdmin.check(City), [])
CityAdmin.view_on_site = lambda obj: obj.get_absolute_url()
self.assertEqual(CityAdmin.check(City), [])
CityAdmin.view_on_site = []
self.assertEqual(CityAdmin.check(City), [
Error(
"The value of 'view_on_site' must be a callable or a boolean value.",
hint=None,
obj=CityAdmin,
id='admin.E025',
),
])
finally:
# Restore the original values for the benefit of other tests.
CityAdmin.view_on_site = True
def test_false(self):
"Ensure that the 'View on site' button is not displayed if view_on_site is False"
response = self.client.get('/test_admin/admin/admin_views/restaurant/1/')
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertNotContains(response,
'"/test_admin/admin/r/%s/1/"' % content_type_pk,
)
def test_true(self):
"Ensure that the default behavior is followed if view_on_site is True"
response = self.client.get('/test_admin/admin/admin_views/city/1/')
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertContains(response,
'"/test_admin/admin/r/%s/1/"' % content_type_pk,
)
def test_callable(self):
"Ensure that the right link is displayed if view_on_site is a callable"
response = self.client.get('/test_admin/admin/admin_views/worker/1/')
worker = Worker.objects.get(pk=1)
self.assertContains(response,
'"/worker/%s/%s/"' % (worker.surname, worker.name),
)
def test_missing_get_absolute_url(self):
"Ensure None is returned if model doesn't have get_absolute_url"
model_admin = ModelAdmin(Worker, None)
self.assertIsNone(model_admin.get_view_on_site_url(Worker()))
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls")
class InlineAdminViewOnSiteTest(TestCase):
fixtures = ['admin-views-users.xml', 'admin-views-restaurants.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def test_false(self):
"Ensure that the 'View on site' button is not displayed if view_on_site is False"
response = self.client.get('/test_admin/admin/admin_views/state/1/')
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertNotContains(response,
'/test_admin/admin/r/%s/1/' % content_type_pk,
)
def test_true(self):
"Ensure that the 'View on site' button is displayed if view_on_site is True"
response = self.client.get('/test_admin/admin/admin_views/city/1/')
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertContains(response,
'/test_admin/admin/r/%s/1/' % content_type_pk,
)
def test_callable(self):
"Ensure that the right link is displayed if view_on_site is a callable"
response = self.client.get('/test_admin/admin/admin_views/restaurant/1/')
worker = Worker.objects.get(pk=1)
self.assertContains(response,
'"/worker_inline/%s/%s/"' % (worker.surname, worker.name),
)
class AdminGenericRelationTests(TestCase):
def test_generic_relation_fk_list_filter(self):
"""
Validates a model with a generic relation to a model with
a foreign key can specify the generic+fk relationship
path as a list_filter. See trac #21428.
"""
class GenericFKAdmin(ModelAdmin):
list_filter = ('tags__content_type',)
validator = ModelAdminValidator()
try:
validator.validate_list_filter(GenericFKAdmin, Plot)
except ImproperlyConfigured:
self.fail("Couldn't validate a GenericRelation -> FK path in ModelAdmin.list_filter")
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestEtagWithAdminView(TestCase):
# See https://code.djangoproject.com/ticket/16003
def test_admin(self):
with self.settings(USE_ETAGS=False):
response = self.client.get('/test_admin/admin/')
self.assertEqual(response.status_code, 302)
self.assertFalse(response.has_header('ETag'))
with self.settings(USE_ETAGS=True):
response = self.client.get('/test_admin/admin/')
self.assertEqual(response.status_code, 302)
self.assertTrue(response.has_header('ETag'))
@override_settings(
PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_views.urls",
)
class GetFormsetsWithInlinesArgumentTest(TestCase):
"""
#23934 - When adding a new model instance in the admin, the 'obj' argument
of get_formsets_with_inlines() should be None. When changing, it should be
equal to the existing model instance.
The GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception
if obj is not None during add_view or obj is None during change_view.
"""
fixtures = ['admin-views-users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
def test_explicitly_provided_pk(self):
post_data = {'name': '1'}
try:
response = self.client.post('/test_admin/admin/admin_views/explicitlyprovidedpk/add/', post_data)
except Exception as e:
self.fail(e)
self.assertEqual(response.status_code, 302)
post_data = {'name': '2'}
try:
response = self.client.post('/test_admin/admin/admin_views/explicitlyprovidedpk/1/', post_data)
except Exception as e:
self.fail(e)
self.assertEqual(response.status_code, 302)
def test_implicitly_generated_pk(self):
post_data = {'name': '1'}
try:
response = self.client.post('/test_admin/admin/admin_views/implicitlygeneratedpk/add/', post_data)
except Exception as e:
self.fail(e)
self.assertEqual(response.status_code, 302)
post_data = {'name': '2'}
try:
response = self.client.post('/test_admin/admin/admin_views/implicitlygeneratedpk/1/', post_data)
except Exception as e:
self.fail(e)
self.assertEqual(response.status_code, 302)
| bsd-3-clause | 3,763,679,827,564,121,000 | 44.677763 | 251 | 0.628556 | false |
tpainter/df_everywhere | df_everywhere/util/wamp_local.py | 1 | 5054 | # DF Everywhere
# Copyright (C) 2014 Travis Painter
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from twisted.internet import reactor
from twisted.internet.endpoints import clientFromString
from twisted.internet.protocol import ReconnectingClientFactory
from autobahn.twisted.wamp import ApplicationSessionFactory
from autobahn.twisted.wamp import ApplicationSession
from autobahn.twisted.websocket import WampWebSocketClientFactory
from autobahn.wamp import types
from autobahn.wamp import auth
class SubpubTileset(ApplicationSession):
"""
An application component that subscribes and receives events.
"""
def onConnect(self):
self.join(self.config.realm, [u"wampcra"], self.config.extra['topic'])
def onChallenge(self, challenge):
#print challenge
if challenge.method == u"wampcra":
            # The shared secret for WAMP-CRA is the key handed to wampClient();
            # the topic name serves only as the authid (see onConnect above).
            if u'salt' in challenge.extra:
                key = auth.derive_key(self.config.extra['key'], challenge.extra['salt'],
                    challenge.extra.get('iterations', None), challenge.extra.get('keylen', None))
            else:
                key = self.config.extra['key']
signature = auth.compute_wcs(key, challenge.extra['challenge'])
return signature
else:
raise Exception("don't know how to compute challenge for authmethod {}".format(challenge.method))
def onJoin(self, details):
if not self in self.factory._myConnection:
self.factory._myConnection.append(self)
def onLeave(self, details):
if self in self.factory._myConnection:
self.factory._myConnection.remove(self)
self.disconnect()
class MyClientFactory(WampWebSocketClientFactory, ReconnectingClientFactory):
#from: https://gist.github.com/DenJohX/e6d0864738da10cb9685
def clientConnectionFailed(self, connector, reason):
print "*************************************"
print "Connection Failed"
print "reason:", reason
print "*************************************"
ReconnectingClientFactory.clientConnectionFailed(self, connector, reason)
def clientConnectionLost(self, connector, reason):
print "*************************************"
print "Connection Lost"
print "reason:", reason
print "*************************************"
ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
def wampServ(wampAddress, wampPort, wampDebug = False):
"""
Sets up an Autobahn|Python WAMPv2 server.
Code modified from WAMP documentation.
"""
from twisted.internet.endpoints import serverFromString
from autobahn.wamp.router import RouterFactory
from autobahn.twisted.wamp import RouterSessionFactory
from autobahn.twisted.websocket import WampWebSocketServerFactory
## create a WAMP router factory
router_factory = RouterFactory()
## create a WAMP router session factory
session_factory = RouterSessionFactory(router_factory)
## create a WAMP-over-WebSocket transport server factory
transport_factory = WampWebSocketServerFactory(session_factory, wampAddress, debug = wampDebug)
transport_factory.setProtocolOptions(failByDrop = False)
## Start websocket server
server = serverFromString(reactor, wampPort)
server.listen(transport_factory)
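# Illustrative usage (not part of the original module; the endpoint strings and
# the topic/key values below are placeholders): start a local router with
# wampServ() and attach an authenticated client with wampClient(), e.g.
#
#   wampServ("ws://127.0.0.1:8080/ws", "tcp:8080")
#   connections = wampClient("ws://127.0.0.1:8080/ws", "tcp:127.0.0.1:8080",
#                            topic="df_everywhere.demo", key="secret")
#   reactor.run()
#
# wampClient() returns the factory's live-connection list; once the session has
# joined the realm, connections[0] can publish()/subscribe() on the topic.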
def wampClient(wampAddress, wampClientEndpoint, topic, key):
"""
    Sets up an Autobahn|Python WAMPv2 client.
Code modified from WAMP documentation.
"""
component_config = types.ComponentConfig(realm = "realm1", extra = {'key': unicode(key), 'topic': unicode(topic)})
session_factory = ApplicationSessionFactory(config = component_config)
session_factory._myConnection = []
session_factory.session = SubpubTileset
## create a WAMP-over-WebSocket transport client factory
#transport_factory = WampWebSocketClientFactory(session_factory, wampAddress, debug = False)
transport_factory = MyClientFactory(session_factory, wampAddress, debug = False, debug_wamp = False)
transport_factory.setProtocolOptions(failByDrop = False)
## start a WebSocket client from an endpoint
client = clientFromString(reactor, wampClientEndpoint)
client.connect(transport_factory)
return session_factory._myConnection | gpl-2.0 | 6,578,624,898,795,414,000 | 41.125 | 118 | 0.691334 | false |
andrzej-r/fusesoc | fusesoc/simulator/simulator.py | 1 | 5975 | from fusesoc.config import Config
from fusesoc.coremanager import CoreManager
from fusesoc.utils import Launcher, pr_info, pr_err
import argparse
import shutil
import os
import logging
import sys
if sys.version_info[0] >= 3:
    from urllib.error import URLError, HTTPError
else:
    from urllib2 import URLError, HTTPError
logger = logging.getLogger(__name__)
class _Verilog(object):
def __init__(self):
self.src_files = []
self.include_files = []
self.include_dirs = []
self.tb_src_files = []
self.tb_private_src_files = []
self.tb_include_files = []
self.tb_include_dirs = []
class Simulator(object):
def __init__(self, system):
config = Config()
self.system = system
self.build_root = os.path.join(config.build_root, self.system.name)
self.src_root = os.path.join(self.build_root, 'src')
if 'toplevel' in self.system.simulator:
self.toplevel = self.system.simulator['toplevel']
else:
self.toplevel = 'orpsoc_tb'
self.vpi_modules = []
self.cm = CoreManager()
self.cores = self.cm.get_depends(self.system.name)
logger.debug( "depend --> " +str (self.cores))
self.env = os.environ.copy()
self.env['CORE_ROOT'] = os.path.abspath(self.system.core_root)
self.env['BUILD_ROOT'] = os.path.abspath(self.build_root)
self.env['SIMULATOR'] = self.TOOL_NAME
self.verilog = _Verilog()
for core_name in self.cores:
logger.debug('core_name=' + core_name)
core = self.cm.get_core(core_name)
if core.verilog:
if core.verilog.include_dirs:
logger.debug('core.include_dirs=' + str(core.verilog.include_dirs))
else:
logger.debug('core.include_dirs=None')
self.verilog.include_dirs += [os.path.join(self.src_root, core_name, d) for d in core.verilog.include_dirs]
self.verilog.tb_include_dirs += [os.path.join(self.src_root, core_name, d) for d in core.verilog.tb_include_dirs]
self.verilog.src_files += [os.path.join(self.src_root, core_name, f) for f in core.verilog.src_files]
self.verilog.tb_src_files += [os.path.join(self.src_root, core_name, f) for f in core.verilog.tb_src_files]
if core_name == self.system.name:
self.verilog.tb_src_files += [os.path.join(self.src_root, core_name, f) for f in core.verilog.tb_private_src_files]
if core.vpi:
vpi_module = {}
core_root = os.path.join(self.src_root, core_name)
vpi_module['include_dirs'] = [os.path.abspath(os.path.join(core_root, d)) for d in core.vpi.include_dirs]
vpi_module['src_files'] = [os.path.abspath(os.path.join(core_root, f)) for f in core.vpi.src_files]
vpi_module['name'] = core.name
vpi_module['libs'] = [l for l in core.vpi.libs]
self.vpi_modules += [vpi_module]
def configure(self):
if os.path.exists(self.sim_root):
for f in os.listdir(self.sim_root):
if os.path.isdir(os.path.join(self.sim_root, f)):
shutil.rmtree(os.path.join(self.sim_root, f))
else:
os.remove(os.path.join(self.sim_root, f))
else:
os.makedirs(self.sim_root)
self.env['SIM_ROOT'] = os.path.abspath(self.sim_root)
for name in self.cores:
pr_info("Preparing " + name)
dst_dir = os.path.join(Config().build_root, self.system.name, 'src', name)
core = self.cm.get_core(name)
try:
core.setup()
            # HTTPError subclasses URLError, so it must be caught first or its
            # handler is unreachable.
            except HTTPError as e:
                raise RuntimeError("Problem while fetching '" + core.name + "': " + str(e))
            except URLError as e:
                raise RuntimeError("Problem while fetching '" + core.name + "': " + str(e.reason))
core.export(dst_dir)
def build(self):
for script in self.system.pre_build_scripts:
script = os.path.abspath(os.path.join(self.system.core_root, script))
pr_info("Running " + script);
try:
Launcher(script, cwd = self.sim_root, env = self.env, shell=True).run()
except RuntimeError:
pr_err("Error: script " + script + " failed")
return
def run(self, args):
parser = argparse.ArgumentParser(prog ='fusesoc sim '+self.system.name, conflict_handler='resolve')
for name in self.cores:
core = self.cm.get_core(name)
if core.plusargs:
core.plusargs.add_arguments(parser)
p = parser.parse_args(args)
self.plusargs = []
for key,value in vars(p).items():
if value == True:
self.plusargs += [key]
elif value == False or value is None:
pass
else:
self.plusargs += [key+'='+str(value[0])]
for script in self.system.pre_run_scripts:
script = os.path.abspath(os.path.join(self.system.core_root, script))
pr_info("Running " + script);
try:
Launcher(script, cwd = self.sim_root, env = self.env, shell=True).run()
except RuntimeError:
pr_err("Error: script " + script + " failed")
def done(self, args):
for script in self.system.post_run_scripts:
script = os.path.abspath(os.path.join(self.system.core_root, script))
pr_info("Running " + script);
try:
Launcher(script, cwd = self.sim_root, env = self.env, shell=True).run()
except RuntimeError:
pr_err("Error: script " + script + " failed")
| gpl-3.0 | -1,031,246,171,377,463,400 | 38.569536 | 138 | 0.557992 | false |
chouseknecht/galaxy | update-authors.py | 1 | 1234 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import logging
logger = logging.getLogger(__name__)
import subprocess
from collections import defaultdict
user_scores = defaultdict(int)
git_log = subprocess.check_output("git log --shortstat --no-merges --pretty='%aN <%aE>'",
shell=True)
log_entries = git_log.decode('utf-8').strip().split('\n')
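# With --shortstat and this --pretty format, git log emits three lines per
# commit: the author line, a blank separator, and the
# "N files changed, X insertions(+), Y deletions(-)" summary; the loop below
# pops them in threes.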
while log_entries:
author = log_entries.pop(0)
_ = log_entries.pop(0)
commit_line = log_entries.pop(0)
commit_parts = [s.strip() for s in commit_line.split(', ')]
for clause in commit_parts:
        count, action = clause.split(' ', 1)
        # Files changed, insertions (+) and deletions (-) all contribute
        # equally to the author's activity score.
        user_scores[author] += int(count)
sorted_user_scores = sorted(user_scores.items(), key=lambda tpl: tpl[1], reverse=True)
print("Galaxy has been contribued to by the following authors:\n"
"This list is automatically generated - please file an issue for corrections)\n")
for author, _ in sorted_user_scores:
print(author)
| apache-2.0 | 1,582,264,523,541,947,400 | 32.351351 | 89 | 0.635332 | false |
mxgnene01/itester | itester/test_main.py | 1 | 3065 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Meng xiangguo <mxgnene01@gmail.com>
#
# _____ ______
# ____==== ]OO|_n_n__][. | |]
# [________]_|__|________)< |MENG|
# oo oo 'oo OOOO-| oo\_ ~o~~~o~'
# +--+--+--+--+--+--+--+--+--+--+--+--+--+
# 2017/10/16 下午5:47
import os
import nose
import json
import requests
from common.parameterized import parameterized
from common.tools import assertDictContains, prepareStrToDict, prepareRequestsParam, findAllFile, encodeutf8
path = os.getenv('ITESTER_CASE_PATH', os.path.join(os.path.dirname(os.path.abspath(__file__)), '../testcase'))
TEST_CASES = findAllFile(path)
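# Each case file discovered under the case path expands, via
# parameterized.expand, into one generated test method on the class below.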
class test_iterster():
'''
    Main entry point class for the interface tests.
'''
@parameterized.expand(TEST_CASES)
def test_runcase(self, name, desc, use_yn, method, url, headers, cookies, params, expect_value, func, proxies_proxy):
        '''
        Interface test - main method
        :param name: test case name
        :param desc: detailed description of the test case
        :param use_yn: flag marking whether the case is enabled
        :param method: GET/POST
        :param url: the API endpoint to call
        :param headers: h1: v1\n h2: v2
        :param cookies: c1: v1\n c2: v2
        :param params: key1=value1&key2=value2
        :param expect_value: expected response value
        :param func: assert_equal - expected and actual responses must be equal / assert_in - actual response must contain the expected value
        :param proxies_proxy: proxy address; when set, requests are routed through it
        '''
headers_dict = prepareStrToDict(headers)
cookies_dict = prepareStrToDict(cookies)
if params:
try:
params_dict = prepareRequestsParam(params)
except:
                # params is a JSON-formatted string
params_dict = json.dumps(json.loads(params))
headers_dict['content-type'] = 'application/json'
else:
params_dict = {}
proxies = {
"http": proxies_proxy,
"https": proxies_proxy,
}
if method == "GET":
if proxies_proxy:
response = requests.get(url, headers=headers_dict, cookies=cookies_dict, params=params_dict, proxies=proxies)
else:
response = requests.get(url, headers=headers_dict, cookies=cookies_dict, params=params_dict)
elif method == "POST":
if proxies_proxy:
response = requests.post(url, headers=headers_dict, cookies=cookies_dict, data=params_dict, proxies=proxies)
else:
response = requests.post(url, headers=headers_dict, cookies=cookies_dict, data=params_dict)
if func == 'assert_equal':
error_lists = assertDictContains(expect_value, response.content, u'node:', err_list=[])
elif func == 'assert_in':
error_lists = assertDictContains(json.loads(expect_value), response.json(), u'node:', err_list=[])
if len(error_lists) > 0:
raise AssertionError("%s error" % name)
else:
return
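
# A hedged example of one parameterized test-case tuple, matching the
# argument order of test_runcase above (all values are hypothetical):
#
#   ("get_user", "fetch a user profile", "Y", "GET",
#    "http://example.com/api/user", "h1: v1", "c1: v1",
#    "id=1", '{"code": 0}', "assert_in", "")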
if __name__ == '__main__':
nose.run()
| gpl-3.0 | 7,690,977,383,853,747,000 | 34.719512 | 125 | 0.556845 | false |
davidfauth/neo4j-pubnub | server/stocktickerneo.py | 1 | 8629 | import sys
import threading
from Queue import Queue
import time
import datetime
import random
import json
import py2neo
from py2neo import Graph, Path
NEO_PROTOCOL = 'http'
NEO_HOST = 'localhost'
NEO_PORT = 7474
NEO_USER = 'neo4j'
NEO_PASSWORD = 'password'
from Pubnub import Pubnub
'''
Global Data - Queue
'''
globalQueueRef = None
'''
Client Listener Thread
'''
class ClientListenerThread(threading.Thread):
def __init__(self,server,port,queueRef,pnb):
threading.Thread.__init__(self)
        # keep the connection on the instance; a bare local assignment here
        # would be discarded as soon as __init__ returns
        self.graph = Graph('%s://%s:%s@%s:%s/db/data/' %
                           (NEO_PROTOCOL, NEO_USER, NEO_PASSWORD, NEO_HOST, NEO_PORT))
self.clientQueue = queueRef
self.pnb = pnb
def run(self):
try :
while True :
print "Before queue block"
data = self.clientQueue.get()
print "After queue block"
print data
req = json.loads(data)
# self.publishPriceHistory(req['name'],req['backtime'],req['channel'])
except Exception as e:
print "Failure in Client Request Handling"
print e
    # NOTE: the two methods below are leftovers from the earlier MongoDB
    # implementation (they rely on self.coll, which is never set in this
    # class); the call site in run() is commented out accordingly.
    def publishPriceHistory(self,idxname,time,channel):
broadcastDict = []
timefrom = self.getLastUpdateTime(idxname)
timefrom = timefrom - (time * 60)
it = self.coll.find({'name': idxname , 'time' : { '$gte' : timefrom } })
for item in it:
broadcastDict.append({ "name" : item['name'],
"value" : item['value'],
"change" : item['change'],
"time" : item['time']
})
broadcastData = json.dumps(broadcastDict)
print 'Broadcasting Price History : ' + broadcastData
self.pnb.publish(channel,broadcastData)
def getLastUpdateTime(self,idxname):
query = [{'$group': {'_id': '$name', 'maxValue': {'$max': '$time'}}}]
result = self.coll.aggregate(query)
for entry in result['result']:
if (entry['_id'] == idxname):
return entry['maxValue']
return None
'''
Description - Main server loop
Data will be stored in the following JSON format
{
"name" : "NASDAQ" ,
"value" : "6345.25" ,
"change" : "+13.45" ,
"time" : 1412322567
}
'''
def startStockPicker(server,port):
global globalQueueRef
global graph
    #Step 1 - Initialize Neo4j & PubNub Connection
# py2neo.set_auth_token('%s:%s' % (NEO_HOST, NEO_PORT), NEO_AUTH_TOKEN)
graph = Graph('%s://%s:%s@%s:%s/db/data/' %
(NEO_PROTOCOL, NEO_USER, NEO_PASSWORD, NEO_HOST, NEO_PORT))
#YOUR PUBNUB KEYS - Replace the publish_key and subscriber_key below with your own keys
pubnub = Pubnub(publish_key="<your pub key>",subscribe_key="<your sub key>")
#Step 2 - Check and define the metadata ( index names )
metaDataInit()
#Step 3 - Set the parameters , max periodicity , random range
updateTime = 10 #Max ten seconds for every price update
numOfItems = 4 #Four indices to manage
random.seed()
#Step 4 - Setup the Queue and ClientListener Thread
clientQueue = Queue()
clientListener = ClientListenerThread(server,port,clientQueue,pubnub)
clientListener.start()
globalQueueRef = clientQueue
#Step 5 - Setup PubNub Subscription for client requests
pubnub.subscribe("stockhistory", historyCallback,historyError)
#Step 6 - Start the stock picking loop
while True:
#Step 6.1 - Wait for random time
time.sleep(random.randint(1,updateTime))
#Step 6.2 - Wake up and update the stock price for one of the index
newPriceData = getUpdatedPrice()
#Step 6.3 - Update the new price in DB
print "New Price Update " + str(newPriceData)
#Step 6.4 - Publish over Pubnub , stockdata channel
broadcastData = { 'name' : newPriceData['name'],
'value' : newPriceData['value'],
'change' : newPriceData['change'],
'time' : newPriceData['time'],
}
pubnub.publish('stockdata',json.dumps(broadcastData))
'''
Description - Populate the index names to track and initial database
'''
def metaDataInit():
global metadataDescr
#Four major world indices to manage
metadataDescr = ['NASDAQ','DOWJONES','FTSE','NIKKEI']
cyres = graph.cypher.execute("""MERGE (s:Stock {name:'FTSE', value:6637.92, change:-16.02 , time : 1})""");
cyres = graph.cypher.execute("""MERGE (s:Stock {name:'NASDAQ', value:4630.60, change:+6.06 , time : 1})""");
cyres = graph.cypher.execute("""MERGE (s:Stock {name:'DOWJONES', value:17630.60, change:-36.02 , time : 1})""");
cyres = graph.cypher.execute("""MERGE (s:Stock {name:'NIKKEI', value:17336.12, change:-23.02 , time : 1})""");
'''
Description - This function simulates the stock index price update
Gets the new price details for indices based on random
selection
Return - Returns the JSON formatted index name, price , delta and time
'''
def getUpdatedPrice():
#Random select the index whose price is to be updated
idx = random.sample(metadataDescr,1)
#Randomly get a price increment in range of 1.0 to 10.0
#It is assumed that price delta will always be in this range
pricedelta = round(random.uniform(1.0,10.0),2)
#Randomly get the direction of price change
#Either positive or negative
pricedir = random.randint(0,1)
#Get the current price of index
#currprice = getCurrentPrice(coll,idx[0])
queryString = """MATCH (s:Stock {name:'"""
queryString = queryString + idx[0]
queryString = queryString + """'}) return s.value"""
print queryString
cyres = graph.cypher.execute(queryString);
print cyres
for r in cyres:
currprice = r[0]
#Calculate new price of index based on pricedelta and pricedir
if(pricedir):
newprice = round(currprice + pricedelta,2)
pricedeltastr = '+'+str(pricedelta)
else :
newprice = round(currprice - pricedelta,2)
pricedeltastr = '-'+str(pricedelta)
queryString = """MATCH (s:Stock {name:'"""
queryString = queryString + idx[0]
queryString = queryString + """'}) SET s.value = """ + str(newprice)
print queryString
cyres = graph.cypher.execute(queryString);
print "New Price for " + " : " + str(newprice)
#Get the current time of update
updateTime = getCurrentTimeInSecs()
#Return the new index price
return {
'name' : idx[0] ,
'value' : newprice ,
'change' : pricedeltastr ,
'time' : updateTime
}
'''
Description - Legacy MongoDB helper: fetches the most recent price update
              of an index idxname (used only by getUpdatedPriceMongo below)
Returns     - Last updated price
'''
def getCurrentPrice(coll,idxname):
query = [{'$group': {'_id': '$name', 'maxValue': {'$max': '$time'}}}]
result = coll.aggregate(query)
for entry in result['result']:
if (entry['_id'] == idxname):
it = coll.find({'name' : idxname , 'time' : entry['maxValue'] }).limit(1)
val = it.next()['value']
print "Last Updated Price for " + idxname + " : " + str(val)
return val
return None
'''
Description - Legacy MongoDB variant of the stock index price update.
              Renamed so it no longer shadows the Neo4j getUpdatedPrice()
              above, which the main loop calls with no arguments.
Return      - Returns the JSON formatted index name, price, delta and time
'''
def getUpdatedPriceMongo(coll):
#Random select the index whose price is to be updated
idx = random.sample(metadataDescr,1)
#Randomly get a price increment in range of 1.0 to 10.0
#It is assumed that price delta will always be in this range
pricedelta = round(random.uniform(1.0,10.0),2)
#Randomly get the direction of price change
#Either positive or negative
pricedir = random.randint(0,1)
#Get the current price of index
currprice = getCurrentPrice(coll,idx[0])
#Calculate new price of index based on pricedelta and pricedir
if(pricedir):
newprice = round(currprice + pricedelta,2)
pricedeltastr = '+'+str(pricedelta)
else :
newprice = round(currprice - pricedelta,2)
pricedeltastr = '-'+str(pricedelta)
print "New Price for " + idx[0] + " : " + str(newprice)
#Get the current time of update
updateTime = getCurrentTimeInSecs()
#Return the new index price
return {
'name' : idx[0] ,
'value' : newprice ,
'change' : pricedeltastr ,
'time' : updateTime
}
'''
Description - Get the current system time in unix timestamp format
'''
def getCurrentTimeInSecs():
dtime = datetime.datetime.now()
ans_time = time.mktime(dtime.timetuple())
return int(ans_time)
'''
PubNub Callback for inciming requests on global listening channel
'''
def historyCallback(message, channel):
global globalQueueRef
print "Received Historical Data Request :" + message
    globalQueueRef.put(message) # consumed by ClientListenerThread.run()
def historyError(message):
print "Error in receiving Historical Data Request : " + message
if __name__ == '__main__':
print sys.argv
if (len(sys.argv) == 3):
startStockPicker(sys.argv[1],int(sys.argv[2]))
else:
print "Error in arguments"
| mit | -6,604,526,154,781,309,000 | 24.758209 | 113 | 0.676208 | false |
theodoregoetz/clas12-dc-wiremap | scratch/dcwiremap.py | 1 | 1201 | import time
import numpy as np
from numpy import random as rand
from matplotlib import pyplot, gridspec, animation
data = rand.uniform(0,100,(36,6,112))
fig = pyplot.figure(1, (18,8))
axs = []
pts = []
sector_grid = gridspec.GridSpec(2,3,wspace=0.3,hspace=0.3)
for sec in range(6):
slyr_grid = gridspec.GridSpecFromSubplotSpec(6,1,
wspace=0.0,hspace=0.1,
subplot_spec=sector_grid[sec])
for slyr in range(6):
axs += [fig.add_subplot(slyr_grid[slyr])]
ax = axs[-1]
pts += [ax.imshow(data[sec*6 + (5-slyr)],
origin='lower',
aspect='auto',
interpolation='nearest',
extent=[-0.5,111.5,-0.5,5.5])]
if slyr == 0:
ax.set_title('Sector '+str(sec+1))
ax.set_ylabel(str(6-slyr))
ax.xaxis.set_major_locator(pyplot.NullLocator())
ax.yaxis.set_major_locator(pyplot.NullLocator())
def update(frame):
    data = rand.uniform(0,100,(36,6,112))
    for sec in range(6):
        for slyr in range(6):
            # mirror the initial layout above: superlayer 6 is drawn on top
            pts[sec*6 + slyr].set_data(data[sec*6 + (5-slyr)])
ani = animation.FuncAnimation(fig, update, np.arange(1, 200),interval=1)
pyplot.show()
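
# A hedged note: the animation can also be written to disk via matplotlib's
# writers (assumes ffmpeg is available on the system):
#
#   ani.save('dcwiremap.mp4', fps=10)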
| gpl-3.0 | -583,575,538,828,056,300 | 25.688889 | 72 | 0.582015 | false |
drnextgis/QGIS | python/plugins/processing/algs/gdal/warp.py | 1 | 9529 | # -*- coding: utf-8 -*-
"""
***************************************************************************
    warp.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from builtins import str
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.PyQt.QtGui import QIcon
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.core.parameters import ParameterRaster
from processing.core.parameters import ParameterExtent
from processing.core.parameters import ParameterSelection
from processing.core.parameters import ParameterCrs
from processing.core.parameters import ParameterNumber
from processing.core.parameters import ParameterString
from processing.core.parameters import ParameterBoolean
from processing.core.outputs import OutputRaster
from processing.algs.gdal.GdalUtils import GdalUtils
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class warp(GdalAlgorithm):
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
SOURCE_SRS = 'SOURCE_SRS'
DEST_SRS = 'DEST_SRS'
METHOD = 'METHOD'
METHOD_OPTIONS = ['near', 'bilinear', 'cubic', 'cubicspline', 'lanczos']
TR = 'TR'
NO_DATA = 'NO_DATA'
EXTRA = 'EXTRA'
RTYPE = 'RTYPE'
TYPE = ['Byte', 'Int16', 'UInt16', 'UInt32', 'Int32', 'Float32', 'Float64']
TILED = 'TILED'
COMPRESS = 'COMPRESS'
JPEGCOMPRESSION = 'JPEGCOMPRESSION'
PREDICTOR = 'PREDICTOR'
ZLEVEL = 'ZLEVEL'
BIGTIFF = 'BIGTIFF'
BIGTIFFTYPE = ['', 'YES', 'NO', 'IF_NEEDED', 'IF_SAFER']
COMPRESSTYPE = ['NONE', 'JPEG', 'LZW', 'PACKBITS', 'DEFLATE']
TFW = 'TFW'
RAST_EXT = 'RAST_EXT'
EXT_CRS = 'EXT_CRS'
def getIcon(self):
return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'warp.png'))
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('Warp (reproject)')
self.group, self.i18n_group = self.trAlgorithm('[GDAL] Projections')
self.tags = self.tr('transform,reproject,crs,srs')
self.addParameter(ParameterRaster(self.INPUT, self.tr('Input layer'), False))
self.addParameter(ParameterCrs(self.SOURCE_SRS,
self.tr('Source SRS'), '', optional=True))
self.addParameter(ParameterCrs(self.DEST_SRS,
self.tr('Destination SRS'), 'EPSG:4326'))
self.addParameter(ParameterString(self.NO_DATA,
self.tr("Nodata value, leave blank to take the nodata value from input"),
'', optional=True))
self.addParameter(ParameterNumber(self.TR,
self.tr('Output file resolution in target georeferenced units (leave 0 for no change)'),
0.0, None, 0.0))
self.addParameter(ParameterSelection(self.METHOD,
self.tr('Resampling method'), self.METHOD_OPTIONS))
self.addParameter(ParameterExtent(self.RAST_EXT, self.tr('Raster extent'), optional=True))
if GdalUtils.version() >= 2000000:
self.addParameter(ParameterCrs(self.EXT_CRS,
self.tr('CRS of the raster extent, leave blank for using Destination SRS'),
optional=True))
params = []
params.append(ParameterSelection(self.RTYPE,
self.tr('Output raster type'), self.TYPE, 5))
params.append(ParameterSelection(self.COMPRESS,
self.tr('GeoTIFF options. Compression type:'), self.COMPRESSTYPE, 4))
params.append(ParameterNumber(self.JPEGCOMPRESSION,
self.tr('Set the JPEG compression level'),
1, 100, 75))
params.append(ParameterNumber(self.ZLEVEL,
self.tr('Set the DEFLATE compression level'),
1, 9, 6))
params.append(ParameterNumber(self.PREDICTOR,
self.tr('Set the predictor for LZW or DEFLATE compression'),
1, 3, 1))
params.append(ParameterBoolean(self.TILED,
self.tr('Create tiled output (only used for the GTiff format)'), False))
params.append(ParameterSelection(self.BIGTIFF,
self.tr('Control whether the created file is a BigTIFF or a classic TIFF'), self.BIGTIFFTYPE, 0))
params.append(ParameterBoolean(self.TFW,
self.tr('Force the generation of an associated ESRI world file (.tfw))'), False))
params.append(ParameterString(self.EXTRA,
self.tr('Additional creation parameters'), '', optional=True))
for param in params:
param.isAdvanced = True
self.addParameter(param)
self.addOutput(OutputRaster(self.OUTPUT, self.tr('Reprojected')))
def getConsoleCommands(self):
noData = self.getParameterValue(self.NO_DATA)
if noData is not None:
noData = str(noData)
srccrs = self.getParameterValue(self.SOURCE_SRS)
dstcrs = self.getParameterValue(self.DEST_SRS)
jpegcompression = str(self.getParameterValue(self.JPEGCOMPRESSION))
predictor = str(self.getParameterValue(self.PREDICTOR))
zlevel = str(self.getParameterValue(self.ZLEVEL))
tiled = str(self.getParameterValue(self.TILED))
compress = self.COMPRESSTYPE[self.getParameterValue(self.COMPRESS)]
bigtiff = self.BIGTIFFTYPE[self.getParameterValue(self.BIGTIFF)]
tfw = str(self.getParameterValue(self.TFW))
rastext = self.getParameterValue(self.RAST_EXT)
rastext_crs = self.getParameterValue(self.EXT_CRS)
arguments = []
arguments.append('-ot')
arguments.append(self.TYPE[self.getParameterValue(self.RTYPE)])
if srccrs:
arguments.append('-s_srs')
arguments.append(srccrs)
if dstcrs:
arguments.append('-t_srs')
arguments.append(dstcrs)
if noData:
arguments.append('-dstnodata')
arguments.append(noData)
arguments.append('-r')
arguments.append(
self.METHOD_OPTIONS[self.getParameterValue(self.METHOD)])
arguments.append('-of')
out = self.getOutputValue(self.OUTPUT)
arguments.append(GdalUtils.getFormatShortNameFromFilename(out))
if self.getParameterValue(self.TR) != 0:
arguments.append('-tr')
arguments.append(str(self.getParameterValue(self.TR)))
arguments.append(str(self.getParameterValue(self.TR)))
extra = self.getParameterValue(self.EXTRA)
if extra is not None:
extra = str(extra)
if rastext:
regionCoords = rastext.split(',')
if len(regionCoords) >= 4:
arguments.append('-te')
arguments.append(regionCoords[0])
arguments.append(regionCoords[2])
arguments.append(regionCoords[1])
arguments.append(regionCoords[3])
if GdalUtils.version() >= 2000000:
if rastext_crs:
arguments.append('-te_srs')
arguments.append(rastext_crs)
if extra and len(extra) > 0:
arguments.append(extra)
if GdalUtils.getFormatShortNameFromFilename(out) == "GTiff":
arguments.append("-co COMPRESS=" + compress)
if compress == 'JPEG':
arguments.append("-co JPEG_QUALITY=" + jpegcompression)
elif (compress == 'LZW') or (compress == 'DEFLATE'):
arguments.append("-co PREDICTOR=" + predictor)
if compress == 'DEFLATE':
arguments.append("-co ZLEVEL=" + zlevel)
if tiled == "True":
arguments.append("-co TILED=YES")
if tfw == "True":
arguments.append("-co TFW=YES")
if len(bigtiff) > 0:
arguments.append("-co BIGTIFF=" + bigtiff)
arguments.append("-wo OPTIMIZE_SIZE=TRUE")
if GdalUtils.version() in [2010000, 2010100]:
arguments.append("--config GDALWARP_IGNORE_BAD_CUTLINE YES")
arguments.append(self.getParameterValue(self.INPUT))
arguments.append(out)
return ['gdalwarp', GdalUtils.escapeAndJoin(arguments)]
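
# A hedged example of the command line this method assembles (flags follow
# the argument handling above; the file names and CRS are hypothetical):
#
#   gdalwarp -ot Float32 -t_srs EPSG:4326 -r near -of GTiff \
#       -co COMPRESS=DEFLATE input.tif reprojected.tif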
| gpl-2.0 | 8,993,122,762,215,593,000 | 44.8125 | 138 | 0.559975 | false |
AntoinePrv/hyperNN | problem/runnee.py | 1 | 2008 | import sys
import os
dir = "/".join(sys.argv[0].split("/")[:-2])
if dir != "":
os.chdir(dir)
sys.path.append(".")
import argparse
from load_data import load_data_bis
from train_MINST import train_model
from logger import custom_logger
def run(**kwargs):
data = load_data_bis()
acc, _ = train_model(data, **kwargs)
return acc
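
# A hedged command-line example (argument names match the parser defined
# in __main__ below; the values themselves are hypothetical):
#
#   python runnee.py --n_epoch 10 --batch_size 128 --learning_rate 0.01 \
#       --activation relu --noeuds 256 128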
if __name__ == "__main__":
# create logger
logger = custom_logger("train_MINST", "log/runnee.log")
# gets arguments
parser = argparse.ArgumentParser(description="Runs MNIST")
parser.add_argument("--n_epoch", help="Number of epochs")
parser.add_argument("--batch_size", help="Batch size")
parser.add_argument("--noeuds", help="Nombre de noeuds", nargs="*")
parser.add_argument("--activation", help="Activation: relu, sigmoid, tanh")
parser.add_argument("--learning_rate", help="Learning rate")
parser.add_argument("--reg_l1", help="L1 regularization coefficient")
parser.add_argument("--reg_l2", help="L2 regularization coefficient")
parser.add_argument("--moment", help="Momentum for the gradient descent")
parser.add_argument("--decay", help="Decay for the learning_rate")
parser.add_argument("--nesterov", help="Using nesterov for the momentum")
args = vars(parser.parse_args())
params = {}
for key in args:
val = args[key]
if val is not None:
if key in ["n_epoch", "batch_size"]:
params[key] = int(val)
if key in ["learning_rate", "reg_l1", "reg_l2", "moment", "decay"]:
params[key] = float(val)
if key in ["nesterov"]:
params[key] = bool(val)
if key in ["activation"]:
params[key] = val
if key == "noeuds":
neurons = map(lambda x: int(x), val)
neurons = filter(lambda x: x > 0, neurons)
params[key] = neurons
params["n_couches"] = len(neurons)
acc = run(**params)
print(acc)
| mit | -6,205,068,182,693,240,000 | 34.22807 | 79 | 0.590637 | false |
Lrcezimbra/google-python-exercises | logpuzzle/logpuzzle.py | 1 | 2212 | #!/usr/bin/python
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
import os
import re
import sys
from urllib.request import urlretrieve
from pathlib import Path
"""Logpuzzle exercise
Given an apache logfile, find the puzzle urls and download the images.
Here's what a puzzle url looks like:
10.254.254.28 - - [06/Aug/2007:00:13:48 -0700] "GET /~foo/puzzle-bar-aaab.jpg HTTP/1.0" 302 528 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.6) Gecko/20070725 Firefox/2.0.0.6"
"""
def read_urls(filename):
"""Returns a list of the puzzle urls from the given log file,
extracting the hostname from the filename itself.
Screens out duplicate urls and returns the urls sorted into
increasing order."""
hostname = 'http://' + filename
with open(filename) as file:
log = file.read()
images_list = re.findall('\S*/images/puzzle\S*', log)
images = sorted(set(images_list))
return [hostname + image for image in images]
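
# A hedged example: a log line containing "GET /images/puzzle/p-baaa.jpg"
# would yield "http://<logfile-name>/images/puzzle/p-baaa.jpg" (the path is
# hypothetical; only urls matching "/images/puzzle" survive the regex above).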
def download_images(img_urls, dest_dir):
"""Given the urls already in the correct order, downloads
each image into the given directory.
Gives the images local filenames img0, img1, and so on.
Creates an index.html in the directory
with an img tag to show each local image file.
Creates the directory if necessary.
"""
path = Path(dest_dir)
if not path.exists():
path.mkdir()
for i, img_url in enumerate(img_urls):
img_filename = 'img%03d.jpg' % i
urlretrieve(img_url, '%s/%s' % (dest_dir, img_filename))
with open('%s/index.html' % dest_dir, 'a') as file:
file.write('<img src="%s" />\n' % img_filename)
def main():
args = sys.argv[1:]
if not args:
print('usage: [--todir dir] logfile ')
sys.exit(1)
todir = ''
if args[0] == '--todir':
todir = args[1]
del args[0:2]
img_urls = read_urls(args[0])
if todir:
download_images(img_urls, todir)
else:
print('\n'.join(img_urls))
if __name__ == '__main__':
main()
| apache-2.0 | 3,068,062,891,926,408,000 | 27 | 192 | 0.639241 | false |
DigitalCampus/django-oppia | api/resources/category.py | 1 | 4631 | import json
from django.conf.urls import url
from django.db.models import Q
from django.http import HttpResponse, Http404
from tastypie import fields
from tastypie.authentication import ApiKeyAuthentication
from tastypie.authorization import ReadOnlyAuthorization
from tastypie.resources import ModelResource
from tastypie.utils import trailing_slash
from oppia.models import Course, Category
from api.resources.course import CourseResource
class CategoryResource(ModelResource):
count = fields.IntegerField(readonly=True)
class Meta:
queryset = Category.objects.all()
resource_name = 'tag'
allowed_methods = ['get']
fields = ['id',
'name',
'description',
'highlight',
'icon',
'order_priority']
authentication = ApiKeyAuthentication()
authorization = ReadOnlyAuthorization()
always_return_data = True
include_resource_uri = False
def get_object_list(self, request):
if request.user.is_staff:
return Category.objects.filter(
courses__isnull=False,
coursecategory__course__is_archived=False).distinct().order_by(
'-order_priority', 'name')
else:
return Category.objects.filter(
courses__isnull=False,
coursecategory__course__is_archived=False) \
.filter(
Q(coursecategory__course__is_draft=False) |
(Q(coursecategory__course__is_draft=True)
& Q(coursecategory__course__user=request.user)) |
(Q(coursecategory__course__is_draft=True)
& Q(coursecategory__course__coursepermissions__user=
request.user))
) \
.distinct().order_by('-order_priority', 'name')
def prepend_urls(self):
return [
url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)%s$"
% (self._meta.resource_name, trailing_slash()),
self.wrap_view('tag_detail'),
name="api_tag_detail"),
]
def tag_detail(self, request, **kwargs):
self.is_authenticated(request)
self.throttle_check(request)
pk = kwargs.pop('pk', None)
try:
category = self._meta.queryset.get(pk=pk)
except Category.DoesNotExist:
raise Http404()
if request.user.is_staff:
courses = Course.objects.filter(
category=category,
is_archived=False).order_by('-priority', 'title')
else:
courses = Course.objects.filter(category=category,
is_archived=False) \
.filter(
Q(is_draft=False) |
(Q(is_draft=True) & Q(user=request.user)) |
(Q(is_draft=True)
& Q(coursepermissions__user=request.user))
) \
.distinct().order_by('-priority', 'title')
course_data = []
cr = CourseResource()
for c in courses:
bundle = cr.build_bundle(obj=c, request=request)
cr.full_dehydrate(bundle)
course_data.append(bundle.data)
response = HttpResponse(
content=json.dumps({'id': pk,
'count': courses.count(),
'courses': course_data,
'name': category.name}),
content_type="application/json; charset=utf-8")
return response
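
    # A hedged sketch of the JSON this endpoint returns (the shape follows
    # the dict built above; the values are hypothetical):
    #
    #   {"id": "3", "count": 2, "name": "health",
    #    "courses": [{...course bundle...}, {...}]}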
def dehydrate_count(self, bundle):
tmp = Course.objects.filter(category__id=bundle.obj.id,
is_archived=False)
if bundle.request.user.is_staff:
count = tmp.count()
else:
count = tmp.filter(Q(is_draft=False) |
(Q(is_draft=True) &
Q(user=bundle.request.user))).count()
return count
def dehydrate_icon(self, bundle):
if bundle.data['icon'] is not None:
return bundle.request.build_absolute_uri(bundle.data['icon'])
else:
return None
def alter_list_data_to_serialize(self, request, data):
if isinstance(data, dict) and 'objects' in data:
data['tags'] = data['objects']
del data['objects']
return data
| gpl-3.0 | 2,553,082,281,161,417,700 | 36.346774 | 79 | 0.521054 | false |
evazyin/Capstoneproject | seedwords.py | 1 | 2459 | '''version1 seed words algorithm'''
'''------------
input: an IR corpus, an expanding corpus (the candidate wordlist), a seed wordlist
output: an expanded seed wordlist
--------------'''
import math
file = open('D:/uw course/capstone/mypersonality/IRtest2.txt')#IR corpus
file1 = open('D:/uw course/capstone/mypersonality/ECtest2grams.txt')#candidate
file2 = open('D:/uw course/capstone/mypersonality/wordlist_lemma.txt')#keyword
file3 = open('D:/uw course/capstone/mypersonality/wordlist_1_expanded.txt','w')
total=0#to trace the process
candidatelist = file1.readline().split(",")
keywordlist = file2.readline().split(",")
candidatecount=[0]*len(candidatelist)
print(len(candidatelist))
keywordcount=[0]*len(keywordlist)
cooccur= [[0 for col in range(len(keywordlist))] for row in range(len(candidatelist))]
PMI=[[0 for col in range(len(keywordlist))] for row in range(len(candidatelist))]
'''-----PMI calculation------'''
while 1:
#while total<=127679:
line=file.readline()
if not line:
break
total+=1
i=0
j=0
candidate=[0]*len(candidatelist)
keyword=[0]*len(keywordlist)
for w1 in candidatelist:
print(w1)
candidate[i]=line.count(w1)
candidatecount[i]+=candidate[i]
i+=1;
for w2 in keywordlist:
keyword[j]=line.count(w2)
keywordcount[j]+=keyword[j]
j+=1;
    for p in range(0,i):        # range(0,i-1) would skip the last candidate
        for q in range(0,j):
            if candidate[p]*keyword[q] >0:
                cooccur[p][q] += min(candidate[p],keyword[q])  # co-occurrence in this line
#print(cooccur)
for N in range(1,100):
if total == math.floor(12767965*N*0.01):
print('%d' %N)
for p in range(0,i):
    for q in range(0,j):
if candidatecount[p]*keywordcount[q]*cooccur[p][q]>0:
#PMI[p][q]=math.log(cooccur[p][q]/(candidatecount[p]*keywordcount[q]))
PMI[p][q]=cooccur[p][q]/(candidatecount[p]*keywordcount[q])
else:
PMI[p][q]=0
#print(PMI)
'''--------------------------'''
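
# For reference, pointwise mutual information is normally defined as
#   PMI(w1, w2) = log( P(w1, w2) / (P(w1) * P(w2)) )
# The loop above keeps the un-logged ratio cooccur/(count1*count2) (the
# math.log call is commented out); since log is monotone this preserves
# the ranking used for the threshold test below.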
Threshold = 0.1
for p in range(0,i):
    PMI_MAX = PMI[p][0]
    for q in range(1,j):
a = PMI[p][q]
if a>PMI_MAX:
PMI_MAX=a
if PMI_MAX > Threshold:
        if candidatelist[p] not in keywordlist:  # compare the word itself, not its per-line count
            keywordlist.append(candidatelist[p])
file3.write(','.join(keywordlist))  # same comma-separated format as the input wordlists
file.close()
file1.close()
file2.close()
file3.close()
| agpl-3.0 | -5,441,539,671,506,725,000 | 29.7375 | 86 | 0.611631 | false |
bintlabs/python-sync-db | dbsync/server/conflicts.py | 1 | 2225 | """
.. module:: server.conflicts
:synopsis: Conflict detection for the centralized push operation.
"""
from sqlalchemy.schema import UniqueConstraint
from dbsync.lang import *
from dbsync.utils import get_pk, class_mapper, query_model, column_properties
def find_unique_conflicts(push_message, session):
"""
Returns a list of conflicts caused by unique constraints in the
given push message contrasted against the database. Each conflict
is a dictionary with the following fields::
object: the conflicting object in database, bound to the
session
columns: tuple of column names in the unique constraint
new_values: tuple of values that can be used to update the
conflicting object.
"""
conflicts = []
for pk, model in ((op.row_id, op.tracked_model)
for op in push_message.operations
if op.command != 'd'):
if model is None: continue
for constraint in ifilter(lambda c: isinstance(c, UniqueConstraint),
class_mapper(model).mapped_table.constraints):
unique_columns = tuple(col.name for col in constraint.columns)
remote_obj = push_message.query(model).\
filter(attr('__pk__') == pk).first()
remote_values = tuple(getattr(remote_obj, col, None)
for col in unique_columns)
if all(value is None for value in remote_values): continue
local_obj = query_model(session, model).\
filter_by(**dict(izip(unique_columns, remote_values))).first()
if local_obj is None: continue
local_pk = getattr(local_obj, get_pk(model))
if local_pk == pk: continue
push_obj = push_message.query(model).\
filter(attr('__pk__') == local_pk).first()
if push_obj is None: continue # push will fail
conflicts.append(
{'object': local_obj,
'columns': unique_columns,
'new_values': tuple(getattr(push_obj, col)
for col in unique_columns)})
return conflicts
| mit | -572,390,988,439,292,000 | 38.035088 | 80 | 0.586067 | false |
embedly/embedly-python | embedly/client.py | 1 | 4750 | """
Client
======
The embedly object that interacts with the service
"""
from __future__ import absolute_import, unicode_literals
import re
import httplib2
import json
from urllib import quote, urlencode
from .models import Url
def get_user_agent():
from . import __version__
return 'Mozilla/5.0 (compatible; embedly-python/%s;)' % __version__
class Embedly(object):
"""
Client
"""
def __init__(self, key=None, user_agent=None, timeout=60):
"""
Initialize the Embedly client
:param key: Embedly Pro key
:type key: str
:param user_agent: User Agent passed to Embedly
:type user_agent: str
:param timeout: timeout for HTTP connection attempts
:type timeout: int
:returns: None
"""
self.key = key
self.user_agent = user_agent or get_user_agent()
self.timeout = timeout
self.services = []
self._regex = None
def get_services(self):
"""
get_services makes call to services end point of api.embed.ly to fetch
the list of supported providers and their regexes
"""
if self.services:
return self.services
url = 'http://api.embed.ly/1/services/python'
http = httplib2.Http(timeout=self.timeout)
headers = {'User-Agent': self.user_agent,
'Connection': 'close'}
resp, content = http.request(url, headers=headers)
if resp['status'] == '200':
resp_data = json.loads(content.decode('utf-8'))
self.services = resp_data
# build the regex that we can use later
_regex = []
for each in self.services:
_regex.append('|'.join(each.get('regex', [])))
self._regex = re.compile('|'.join(_regex))
return self.services
def is_supported(self, url):
"""
``is_supported`` is a shortcut for client.regex.match(url)
"""
return self.regex.match(url) is not None
@property
def regex(self):
"""
``regex`` property just so we can call get_services if the _regex is
not yet filled.
"""
if not self._regex:
self.get_services()
return self._regex
def _get(self, version, method, url_or_urls, **kwargs):
"""
_get makes the actual call to api.embed.ly
"""
if not url_or_urls:
raise ValueError('%s requires a url or a list of urls given: %s' %
(method.title(), url_or_urls))
# a flag we can use instead of calling isinstance() all the time
multi = isinstance(url_or_urls, list)
# throw an error early for too many URLs
if multi and len(url_or_urls) > 20:
raise ValueError('Embedly accepts only 20 urls at a time. Url '
'Count:%s' % len(url_or_urls))
query = ''
key = kwargs.get('key', self.key)
# make sure that a key was set on the client or passed in
if not key:
raise ValueError('Requires a key. None given: %s' % key)
kwargs['key'] = key
query += urlencode(kwargs)
if multi:
query += '&urls=%s&' % ','.join([quote(url) for url in url_or_urls])
else:
query += '&url=%s' % quote(url_or_urls)
url = 'http://api.embed.ly/%s/%s?%s' % (version, method, query)
http = httplib2.Http(timeout=self.timeout)
headers = {'User-Agent': self.user_agent,
'Connection': 'close'}
resp, content = http.request(url, headers=headers)
if resp['status'] == '200':
data = json.loads(content.decode('utf-8'))
if kwargs.get('raw', False):
data['raw'] = content
else:
data = {'type': 'error',
'error': True,
'error_code': int(resp['status'])}
if multi:
return map(lambda url, data: Url(data, method, url),
url_or_urls, data)
return Url(data, method, url_or_urls)
def oembed(self, url_or_urls, **kwargs):
"""
oembed
"""
return self._get(1, 'oembed', url_or_urls, **kwargs)
def preview(self, url_or_urls, **kwargs):
"""
oembed
"""
return self._get(1, 'preview', url_or_urls, **kwargs)
def objectify(self, url_or_urls, **kwargs):
"""
oembed
"""
return self._get(2, 'objectify', url_or_urls, **kwargs)
def extract(self, url_or_urls, **kwargs):
"""
oembed
"""
return self._get(1, 'extract', url_or_urls, **kwargs)
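
# A hedged usage sketch (not part of the original library; the key and url
# below are placeholders):
#
#   client = Embedly(key='YOUR_API_KEY')
#   url_obj = client.oembed('http://example.com/some/page')
#   # url_obj is a models.Url wrapper around the parsed response dict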
| mit | -4,047,278,784,260,133,000 | 26.616279 | 80 | 0.533053 | false |
lilchurro/vent | vent/menus/ntap.py | 1 | 9189 | import ast
import npyscreen
from vent.api.actions import Action
class CreateNTap(npyscreen.ActionForm):
""" For creating a new network tap container """
def create(self):
self.add_handlers({"^T": self.quit, "^Q": self.quit})
self.add(npyscreen.Textfield,
value='Create a network tap that calls tcpdump and records '
'based on the parameters given ',
editable=False,
color="STANDOUT")
self.add(npyscreen.Textfield,
value='via a POST request '
'to the url of the core network tap tool. ',
editable=False,
color="STANDOUT")
self.add(npyscreen.Textfield,
value='An example payload: ',
editable=False,
color="STANDOUT")
self.add(npyscreen.Textfield,
value=' {"nic": "eth0", "id": "testId", "interval": "60" '
'"filter": "", "iters": "1"} ',
editable=False,
color="STANDOUT")
self.nextrely += 1
self.nic = self.add(npyscreen.TitleText, name='nic')
self.id = self.add(npyscreen.TitleText, name='id')
self.interval = self.add(npyscreen.TitleText, name='interval')
self.filter = self.add(npyscreen.TitleText, name='filter')
self.iters = self.add(npyscreen.TitleText, name='iters')
def on_ok(self):
# error check to make sure all fields were filled out
if not self.nic.value or not self.id.value or not self.interval.value \
or not self.iters.value:
npyscreen.notify_confirm("Please fill out all fields",
form_color='CAUTION')
return
# create a dictionary with user entered data
payload = {}
payload[self.nic.name] = self.nic.value
payload[self.id.name] = self.id.value
payload[self.interval.name] = self.interval.value
payload[self.filter.name] = self.filter.value
payload[self.iters.name] = self.iters.value
# create an action object and have it do the work
self.api_action = Action()
try:
url = self.api_action.get_vent_tool_url('network-tap')[1] + \
'/create'
request = self.api_action.post_request(url, str(payload))
if request[0]:
npyscreen.notify_confirm("Success: " + str(request[1]))
self.quit()
else:
npyscreen.notify_confirm("Failure: " + str(request[1]))
except Exception as e: # pragma: no cover
npyscreen.notify_confirm("Failure: " + str(e))
return
def quit(self, *args, **kwargs):
""" Overriden to switch back to MAIN form """
self.parentApp.switchForm("MAIN")
def on_cancel(self):
""" When user cancels, return to MAIN """
self.quit()
class NICsNTap(npyscreen.ActionForm):
""" For listing all available network interfaces """
def create(self):
self.add_handlers({"^T": self.quit, "^Q": self.quit})
self.add(npyscreen.Textfield,
                 value='List all available network interfaces',
editable=False,
color="STANDOUT")
self.nextrely += 1
try:
self.api_action = Action()
url = self.api_action.get_vent_tool_url('network-tap')[1] + '/nics'
request = self.api_action.get_request(url)
if request[0]:
box = self.add(npyscreen.BoxTitle,
name="Available Network Interfaces",
max_height=40)
                # request[1] eval's to a (status, payload) tuple; the payload
                # is a newline-separated list of nic names
                request = ast.literal_eval(str(request[1]))
                data = [d for d in request[1].split("\n")]
box.values = data
else:
npyscreen.notify_confirm("Failure: " + request[1])
except Exception as e: # pragma no cover
npyscreen.notify_confirm("Failure: " + str(e))
def quit(self, *args, **kwargs):
""" Overriden to switch back to MAIN form """
self.parentApp.switchForm("MAIN")
def on_cancel(self):
""" When user cancels, return to MAIN """
self.quit()
def on_ok(self):
self.quit()
class ListNTap(npyscreen.ActionForm):
""" For listing all network tap capture containers """
def create(self):
self.add_handlers({"^T": self.quit, "^Q": self.quit})
self.add(npyscreen.Textfield,
value='List all network tap capture containers',
editable=False,
color="STANDOUT")
self.nextrely += 1
try:
self.api_action = Action()
url = self.api_action.get_vent_tool_url('network-tap')[1] + '/list'
request = self.api_action.get_request(url)
if request[0]:
box = self.add(npyscreen.BoxTitle,
name="Network Tap Capture Containers",
max_height=40)
request = ast.literal_eval(str(request[1]))
data = [d for d in list(request[1])]
box.values = data
else:
npyscreen.notify_confirm("Failure: " + request[1])
except Exception as e: # pragma no cover
npyscreen.notify_confirm("Failure: " + str(e))
def quit(self, *args, **kwargs):
""" Overriden to switch back to MAIN form """
self.parentApp.switchForm("MAIN")
def on_cancel(self):
""" When user cancels, return to MAIN """
self.quit()
def on_ok(self):
self.quit()
class ActionNTap(npyscreen.ActionForm):
""" Base class to inherit from. """
def __init__(self, n_action=None, *args, **kwargs):
self.n_action = n_action
super(ActionNTap, self).__init__(*args, **kwargs)
def create(self):
self.add_handlers({"^T": self.quit, "^Q": self.quit})
self.add(npyscreen.Textfield,
value=self.n_action + ' a network tap capture container.',
editable=False,
color="STANDOUT")
self.add(npyscreen.Textfield,
value='Choose a container to ' + self.n_action,
editable=False,
color="STANDOUT")
self.nextrely += 1
try:
self.api_action = Action()
# display all containers by sending a get request to ntap/list
# nlist returns tuple and get_request returns tuple
url = self.api_action.get_vent_tool_url('network-tap')[1] + '/list'
request = self.api_action.get_request(url)
# create selection for containers
if request[0]:
request = ast.literal_eval(str(request[1]))
data = [d for d in list(request[1])]
self.ms = self.add(npyscreen.TitleMultiSelect, max_height=20,
name='Choose one or more containers to ' +
self.n_action,
values=data)
else:
npyscreen.notify_confirm("Failure: " + str(request[1]))
except Exception as e: # pragma: no cover
npyscreen.notify_confirm("Failure: " + str(e))
def on_ok(self):
# error check to make sure at least one box was selected
if not self.ms.value:
npyscreen.notify_confirm("Please select at least one container.",
form_color='CAUTION')
# format the data into something ncontrol likes
else:
payload = {'id': list(x['id'] for x in
self.ms.get_selected_objects())}
# grab the url that network-tap is listening to
try:
npyscreen.notify_wait("Please wait. Currently working")
self.api_action = Action()
url = self.api_action.get_vent_tool_url('network-tap')[1] + "/" \
+ self.n_action
request = self.api_action.post_request(url, payload)
if request[0]:
npyscreen.notify_confirm("Success: " + str(request[1]))
self.quit()
else:
npyscreen.notify_confirm("Failure: " + str(request[1]))
except Exception as e: # pragma: no cover
npyscreen.notify_confirm("Failure: " + str(e))
def quit(self, *args, **kwargs):
""" Overriden to switch back to MAIN form """
self.parentApp.switchForm("MAIN")
def on_cancel(self):
""" When user cancels, return to MAIN """
self.quit()
class DeleteNTap(ActionNTap):
""" Delete inheritance """
def __init__(self, *args, **kwargs):
ActionNTap.__init__(self, 'delete')
class StartNTap(ActionNTap):
""" Delete inheritance """
def __init__(self, *args, **kwargs):
ActionNTap.__init__(self, 'start')
class StopNTap(ActionNTap):
""" Delete inheritance """
def __init__(self, *args, **kwargs):
ActionNTap.__init__(self, 'stop')
| apache-2.0 | -5,499,286,685,946,207,000 | 34.342308 | 79 | 0.536293 | false |
janusnic/21v-python | unit_17/1.py | 1 | 1114 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
from PyQt4 import QtGui, QtCore
class Example(QtGui.QWidget):
def __init__(self):
super(Example, self).__init__()
self.initUI()
def initUI(self):
        # the escaped string below reads "Лев Николаевич Толстой:
        # Анна Каренина" ("Leo Tolstoy: Anna Karenina")
        self.text = u'\u041b\u0435\u0432 \u041d\u0438\u043a\u043e\u043b\u0430\
\u0435\u0432\u0438\u0447 \u0422\u043e\u043b\u0441\u0442\u043e\u0439: \n\
\u0410\u043d\u043d\u0430 \u041a\u0430\u0440\u0435\u043d\u0438\u043d\u0430'
self.setGeometry(300, 300, 280, 170)
self.setWindowTitle('Draw text')
self.show()
def paintEvent(self, event):
qp = QtGui.QPainter()
qp.begin(self)
self.drawText(event, qp)
qp.end()
def drawText(self, event, qp):
qp.setPen(QtGui.QColor(168, 34, 3))
qp.setFont(QtGui.QFont('Decorative', 10))
qp.drawText(event.rect(), QtCore.Qt.AlignCenter, self.text)
def main():
app = QtGui.QApplication(sys.argv)
ex = Example()
sys.exit(app.exec_())
if __name__ == '__main__':
main() | mit | -4,442,118,358,656,477,000 | 23.23913 | 78 | 0.568223 | false |
wiki2014/Learning-Summary | alps/cts/apps/CameraITS/tests/inprog/test_rawstats.py | 2 | 1620 | # Copyright 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import its.image
import its.caps
import its.device
import its.objects
import its.target
import os.path
import math
def main():
"""Test capturing some rawstats data.
"""
NAME = os.path.basename(__file__).split(".")[0]
with its.device.ItsSession() as cam:
cam.do_3a(do_af=False);
req = its.objects.auto_capture_request()
for (gw,gh) in [(16,16)]:#,(4080,1)]:
cap = cam.do_capture(req,
{"format":"rawStats","gridWidth":gw,"gridHeight":gh})
mean_image, var_image = its.image.unpack_rawstats_capture(cap)
if gw > 1 and gh > 1:
h,w,_ = mean_image.shape
for ch in range(4):
m = mean_image[:,:,ch].reshape(h,w,1)/1023.0
v = var_image[:,:,ch].reshape(h,w,1)
its.image.write_image(m, "%s_mean_ch%d.jpg" % (NAME,ch), True)
its.image.write_image(v, "%s_var_ch%d.jpg" % (NAME,ch), True)
if __name__ == '__main__':
main()
| gpl-3.0 | -7,820,512,805,679,995,000 | 32.75 | 82 | 0.612963 | false |
AlienStudio/jpsp_python | jpsp/jpspapp/urls.py | 1 | 2922 | from django.urls import path
from . import views
urlpatterns = [
path('logout', views.student_logout, name="Logout"),
path(r's/login', views.student_login_page, name="StudentLogin"),
path('s/check_login', views.student_check_login, name="StudentCheckLogin"),
path(r's/club/news/<int:page>', views.student_club_news),
path(r's/club/list/<int:page>', views.student_club_list, name='StudentClubList'),
path('s/club/detail/<int:club_id>', views.student_club_detail, name="StudentClubDetail"),
path('s/club/establish', views.student_club_establish, name="StudentClubEstablish"),
path('s/club/attend', views.student_club_attend, name="StudentClubAttend"),
path(r's/dashboard', views.student_dashboard_index, name="StudentDashboardIndex"),
path(r's/dashboard/password', views.student_dashboard_password, name="StudentDashboardPassword"),
path(r's/dashboard/clubs', views.student_dashboard_clubs, name="StudentDashboardClubs"),
path(r's/dashboard/activities', views.student_dashboard_activities, name="StudentDashboardActivities"),
path('cd/login', views.admin_login_page, name="CDLogin"),
path('c/login', views.club_login_page, name="ClubLogin"),
path('c/check_login', views.club_check_login, name="ClubCheckLogin"),
path('c/dashboard', views.club_dashboard_index, name="ClubDashboardIndex"),
path('c/post/add/check',views.club_post_add_check,name="ClubPostAddCheck"),
path('c/post/add',views.club_post_add,name="ClubPostAdd"),
path('c/logout',views.club_logout,name="ClubLogout"),
path('c/member/list',views.club_member_list,name="ClubMemberList"),
path('c/file/upload/list/<int:page>', views.club_file_upload_list, name="ClubFileUploadList"),
path('c/file/download/list/<int:page>', views.club_file_download_list, name="ClubFileDownloadList"),
path('c/file/upload', views.club_file_upload, name="ClubFileUpload"),
path('cd/check_login', views.admin_check_login, name="CDCheckLogin"),
path('cd/dashboard', views.admin_dashboard, name="CDDashboard"),
path('cd/post/list/<int:page>', views.admin_post_list, name="CDPostList"),
path('cd/post/detail/<int:post_id>',views.admin_post_detail,name="CDPostDetail"),
path('cd/post/star',views.admin_post_star,name="CDPostStar"),
path('cd/event/list/<int:page>', views.admin_event_list, name="CDEventList"),
path('cd/file/upload/list/<int:page>', views.admin_file_upload_list, name="CDFileUploadList"),
path('cd/file/download/list/<int:page>', views.admin_file_download_list, name="CDFileDownloadList"),
path('cd/file/upload', views.admin_file_upload, name="CDFileUpload"),
path('cd/student/list', views.admin_student_list, name="CDStudentList"),
path('cd/club/list',views.admin_club_list,name="CDClubList"),
path('s/about', views.about, name="About"),
path('contact', views.contact, name="Contact"),
path('', views.index, name="Index")
]
| mit | 4,627,145,095,779,551,000 | 66.953488 | 107 | 0.711841 | false |
gnomex/analysis | src/arpspoof.py | 1 | 5528 | #!/usr/bin/python
# src from https://github.com/rusec/scapy-arpspoof-demo
from scapy.all import *
from argparse import ArgumentParser
import os
IP_FORWARD = '/proc/sys/net/ipv4/ip_forward'
TIMEOUT = 2
RETRY = 10
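
# A hedged command-line example (flags match set_configs() below; addresses
# and interface are hypothetical, and raw sockets usually require root):
#
#   sudo python arpspoof.py -t 192.168.1.23 -g 192.168.1.1 -i eth0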
# This function uses argparse to parse command line
# arguments passed to the script.
def set_configs():
# create a new ArgumentParser
parser = ArgumentParser()
# add definitions for command line arguments
parser.add_argument('-t',
dest='victim',
required=True,
type=str,
help='The victim\'s IP address')
parser.add_argument('-g',
dest='gateway',
required=True,
type=str,
help='The gateway\'s IP address')
parser.add_argument('-i',
dest='interface',
required=True,
type=str,
help='Use this network interface')
# parse command line arguments according to those definitions
args = parser.parse_args()
# use arguments to construct config dictionary
return {
'victim' : {
'ip' : args.victim,
'mac' : ip_to_mac(args.victim),
},
'gateway' : {
'ip' : args.gateway,
'mac' : ip_to_mac(args.gateway),
},
'iface' : args.interface,
}
# enables packet forwarding by interacting with the proc filesystem
def enable_packet_forwarding():
with open(IP_FORWARD, 'w') as fd:
fd.write('1')
# disables packet forwarding by interacting with the proc filesystem
def disable_packet_forwarding():
with open(IP_FORWARD, 'w') as fd:
fd.write('0')
# use iptables to redirect http traffic to port 10000 where it can
# be parsed using sslstrip
def enable_http_redirection():
print '[*] Redirecting all http traffic to port 10000'
os.system('iptables -v -t nat -A PREROUTING -p tcp --destination-port 80 -j REDIRECT --to-port 10000')
# restore iptables to default state
def disable_http_redirection():
print '[*] Disabling http redirection'
os.system('iptables -v --flush')
os.system('iptables -v --table nat --flush')
os.system('iptables -v --delete-chain')
os.system('iptables -v --table nat --delete-chain')
# uses scapy to arp poison victim
def poison_victim(configs):
    # get victim and gateway ip and hardware addresses from
    # configs (note: set_configs() returns nested dicts)
    victim_mac = configs['victim']['mac']
    gateway_mac = configs['gateway']['mac']
    victim_ip = configs['victim']['ip']
    gateway_ip = configs['gateway']['ip']
# create layer 3 Arp() packets
victim_arp = ARP()
gateway_arp = ARP()
# set Operation to 'is-at'
victim_arp.op = 2
gateway_arp.op = 2
# set hwdst
victim_arp.hwdst = victim_mac
gateway_arp.hwdst = gateway_mac
# set pdst
victim_arp.pdst = victim_ip
gateway_arp.pdst = gateway_ip
# set psrc
victim_arp.psrc = gateway_ip
gateway_arp.psrc = victim_ip
# continue attack indefinitely
while True:
try:
print '[*] Poisoning victim'
# send spoofed arp replies
send(victim_arp)
send(gateway_arp)
# wait for ARP replies from default GW or victim
sniff(filter='arp and host %s or %s' %\
(gateway_ip, victim_ip), count=1)
# break out of loop if user hits ctrl+c
except KeyboardInterrupt:
break
print '[*] All done!'
# restores the victim and gateway's arp cache to its correct
# state
def restore_victim(configs):
    victim_mac = configs['victim']['mac']
    gateway_mac = configs['gateway']['mac']
    victim_ip = configs['victim']['ip']
    gateway_ip = configs['gateway']['ip']

    # create a Layer 3 ARP() packet telling the victim the
    # gateway's real mac address again
    victim_arp = ARP()
    victim_arp.op = 2
    victim_arp.hwsrc = gateway_mac
    victim_arp.psrc = gateway_ip
    victim_arp.hwdst = 'ff:ff:ff:ff:ff:ff'
    send(victim_arp)

    # create a Layer 3 ARP() packet telling the gateway the
    # victim's real mac address again
    gateway_arp = ARP()
    gateway_arp.op = 2
    gateway_arp.hwsrc = victim_mac
    gateway_arp.psrc = victim_ip
    gateway_arp.hwdst = 'ff:ff:ff:ff:ff:ff'
    send(gateway_arp)
# sends a legitimate arp request to resolve an IP to a
# mac address
def ip_to_mac(ip, retry=RETRY, timeout=TIMEOUT):
arp = ARP()
# set operation to 'who-has' (arp request)
arp.op = 1
arp.hwdst = 'ff:ff:ff:ff:ff:ff'
arp.pdst = ip
response, unanswered = sr(arp, retry=retry, timeout=timeout)
# get the response from the first packet received by accessing
# layer 2 header
for s,r in response:
return r[ARP].underlayer.src
# return failure
return None
# driver function for arp cache poisoning attack
def poison(configs):
enable_packet_forwarding()
enable_http_redirection()
#poison_victim(configs)
# driver function for restoring victim and gateway after
# arp cache poisoning attack
def antidote(configs):
#restore_victim(configs)
disable_http_redirection()
disable_packet_forwarding()
def main():
configs = set_configs()
print '[*] Using interface', configs['iface']
conf.iface = configs['iface']
try:
poison(configs)
except KeyboardInterrupt:
pass
antidote(configs)
if __name__ == '__main__':
main()
| gpl-3.0 | -362,603,220,197,692,000 | 23.034783 | 107 | 0.611433 | false |
js850/PyGMIN | pygmin/gui/double_ended_connect_runner.py | 1 | 10710 | """
tools to run the double ended connect in a separate process and
make sure that the minima and transition states found are
incorporated back into the master database
"""
import multiprocessing as mp
import sys
import signal
import logging
import numpy as np
from PyQt4 import QtCore, QtGui
from pygmin.utils.events import Signal
class UnboundMinimum(object):
def __init__(self, minimum):
self._id = minimum._id
self.energy = minimum.energy
self.coords = minimum.coords.copy()
class UnboundTransitionState(object):
def __init__(self, ts):
self._id = ts._id
self.energy = ts.energy
self.coords = ts.coords.copy()
self.eigenvec = ts.eigenvec
self.eigenval = ts.eigenval
self._minimum1_id = ts._minimum1_id
self._minimum2_id = ts._minimum2_id
class OutLog(object):
"""for redirecting stdout or stderr
    every time something is written to this object, it is sent through
the pipe `conn`.
from http://www.riverbankcomputing.com/pipermail/pyqt/2009-February/022025.html
"""
def __init__(self, conn):
self.conn = conn
self.message = ""
def write(self, m):
if len(m) > 0:
self.conn.send(("stdout", m))
return
## sys.stderr.write(":sending message:"+ m)
# self.message += m
## if len(self.message) > 100:
## self.flush()
### self.conn.send(("stdout", m))
## if len(self.mes)
# if self.message[-1] == "\n":
# self.flush()
def flush(self):
# self.conn.send(("stdout", self.message))
# self.message = ""
pass
class DECProcess(mp.Process):
"""This object will run in a separate process and will actually do the connect run
when the run is finished the minima and transition states found will be sent
back through the pipe as UnboundMinimum and UnboundTransitionState objects
Parameters
----------
comm : pipe
child end of a mp.Pipe()
system :
min1, min2 :
the minima to try to connect
pipe_stdout : bool
if true log messages will be sent back through the pipe
return_smoothed_path : bool
if the run ends successfully the smoothed path will be sent
back through the pipe
"""
def __init__(self, comm, system, min1, min2, pipe_stdout=True,
return_smoothed_path=True):
mp.Process.__init__(self)
#QtCore.QThread.__init__(self)
self.comm = comm
self.system = system
self.min1, self.min2 = min1, min2
self.pipe_stdout = pipe_stdout
self.return_smoothed_path = return_smoothed_path
self.started = False
self.finished = False
def get_smoothed_path(self):
mints, S, energies = self.connect.returnPath()
clist = [m.coords for m in mints]
smoothpath = self.system.smooth_path(clist)
return smoothpath, S, energies
def test_success(self):
return self.connect.graph.areConnected(self.m1local, self.m2local)
def clean_up(self):
"send the lists of transition states and minima back to the parent process"
minima = [UnboundMinimum(m) for m in self.db.minima()]
tslist = [UnboundTransitionState(ts) for ts in self.db.transition_states()]
self.comm.send(("new coords", minima, tslist))
# return the success status
success = self.test_success()
self.comm.send(("success", success))
if success:
# return the smoothed path, or None if not successful
pathdata = self.get_smoothed_path()
self.comm.send(("smoothed path", pathdata))
# send signal we're done here
self.finished = True
self.comm.send(("finished",))
def terminate_early(self, *args, **kwargs):
sys.stderr.write("caught signal, cleaning up and exiting\n")
if self.started and not self.finished:
sys.stderr.write("starting clean up\n")
self.clean_up()
sys.stderr.write("finished clean up\n")
sys.stderr.write("exiting\n")
sys.exit(0)
def do_double_ended_connect(self):
db = self.system.create_database()
self.db = db
# min1 and min2 are associated with the old database, we need to create
# the minima again using the new database
self.m1local = db.addMinimum(self.min1.energy, self.min1.coords)
self.m2local = db.addMinimum(self.min2.energy, self.min2.coords)
self.started = True
self.connect = self.system.get_double_ended_connect(self.m1local, self.m2local, db,
fresh_connect=True, load_no_distances=True)
self.connect.connect()
def run(self):
signal.signal(signal.SIGTERM, self.terminate_early)
signal.signal(signal.SIGINT, self.terminate_early)
if self.pipe_stdout:
# print >> sys.stderr, "stderr"
self.mylog = OutLog(self.comm)
sys.stdout = self.mylog
logger = logging.getLogger("pygmin")
handles = logger.handlers
for h in handles:
# print >> sys.stderr, "removing handler", h
logger.removeHandler(h)
sh = logging.StreamHandler(self.mylog)
logger.addHandler(sh)
# import pygmin
# logger.removeHandler(pygmin.h)
# print >> sys.stderr, "stderr2"
self.do_double_ended_connect()
self.clean_up()
class DECRunner(QtCore.QObject):
"""Spawn a double ended connect run in a child process
This will spawn a new process and deal with the communication
Parameters
----------
system :
database : Database
The minima and transition states found will be added to the
database after the connect run is finished
    min1, min2 : Minimum objects
the minima to try to connect
outstream : an object with attribute `outstream.write(mystring)`
the log messages from the connect run will be redirected here
return_smoothed_path : bool
if True the final smoothed path will be calculated
Attributes
----------
on_finished : Signal
this signal will be called when the connect job is finished
"""
def __init__(self, system, database, min1, min2, outstream=None,
return_smoothed_path=True, daemon=True):
QtCore.QObject.__init__(self)
self.system = system
self.database = database
self.min1, self.min2 = min1, min2
self.return_smoothed_path = return_smoothed_path
self.daemon = daemon
self.outstream = outstream
self.on_finished = Signal()
self.decprocess = None
self.newminima = set()
self.newtransition_states = set()
self.success = False
self.killed_early = False
self.is_running = False
def poll(self):
"""this does the checking in the background to see if any messages have been passed"""
# if not self.decprocess.is_alive():
# self.refresh_timer.stop()
# return
if not self.parent_conn.poll():
return
message = self.parent_conn.recv()
self.process_message(message)
def start(self):
"""start the connect job"""
if(self.decprocess):
if(self.decprocess.is_alive()):
return
parent_conn, child_conn = mp.Pipe()
self.conn = parent_conn
self.parent_conn = parent_conn
self.decprocess = DECProcess(child_conn, self.system, self.min1, self.min2,
pipe_stdout=(self.outstream is not None))
self.decprocess.daemon = self.daemon
self.decprocess.start()
# self.poll_thread = PollThread(self, parent_conn)
# self.poll_thread.start()
self.refresh_timer = QtCore.QTimer()
self.refresh_timer.timeout.connect(self.poll)
self.refresh_timer.start(1.)
self.is_running = True
def add_minima_transition_states(self, new_minima, new_ts):
"""Add the minima and transition states found to the database
convert the UnboundMinimum and UnboundTransitionStates to ones
bound to self.database
"""
print "processing new minima and ts"
self.newminima = set()
self.newtransition_states = set()
old2new = dict()
self.system.params.gui._sort_lists = False
for m in new_minima:
mnew = self.database.addMinimum(m.energy, m.coords)
old2new[m._id] = mnew
self.newminima.add(mnew)
for ts in new_ts:
m1id = ts._minimum1_id
m2id = ts._minimum2_id
m1new = old2new[m1id]
m2new = old2new[m2id]
tsnew = self.database.addTransitionState(ts.energy, ts.coords, m1new,
m2new, eigenval=ts.eigenval,
eigenvec=ts.eigenvec)
self.newtransition_states.add(tsnew)
nmin = len(new_minima)
nts = len(new_ts)
print "finished connect run: adding", nmin, "minima, and", nts, "transition states to database"
self.system.params.gui._sort_lists = True
def terminate_early(self):
self.killed_early = True
self.decprocess.terminate()
print "finished terminating"
self.is_running = False
# self.decprocess.join()
# print "done killing job"
# self.on_finished()
def finished(self):
"""the job is finished, do some clean up"""
self.decprocess.join()
self.decprocess.terminate()
self.decprocess.join()
self.refresh_timer.stop()
# print "done killing job"
self.on_finished()
self.is_running = False
def process_message(self, message):
if message[0] == "stdout":
self.outstream.write(message[1])
elif message[0] == "new coords":
new_minima, new_ts = message[1:]
self.add_minima_transition_states(new_minima, new_ts)
elif message[0] == "success":
self.success = message[1]
elif message[0] == "smoothed path":
pathdata = message[1]
self.smoothed_path, self.S, self.energies = pathdata
elif message[0] == "finished":
self.finished()
| gpl-3.0 | -897,367,410,067,978,000 | 33.111465 | 103 | 0.585528 | false |
Yelp/pyramid_zipkin | pyramid_zipkin/tween.py | 1 | 8038 | # -*- coding: utf-8 -*-
import functools
import warnings
from collections import namedtuple
from py_zipkin import Encoding
from py_zipkin import Kind
from py_zipkin.exception import ZipkinError
from py_zipkin.storage import get_default_tracer
from py_zipkin.transport import BaseTransportHandler
from pyramid_zipkin.request_helper import create_zipkin_attr
from pyramid_zipkin.request_helper import get_binary_annotations
from pyramid_zipkin.request_helper import should_not_sample_path
from pyramid_zipkin.request_helper import should_not_sample_route
def _getattr_path(obj, path):
"""
getattr for a dot separated path
If an AttributeError is raised, it will return None.
"""
if not path:
return None
for attr in path.split('.'):
obj = getattr(obj, attr, None)
return obj
_ZipkinSettings = namedtuple('ZipkinSettings', [
'zipkin_attrs',
'transport_handler',
'service_name',
'span_name',
'add_logging_annotation',
'report_root_timestamp',
'host',
'port',
'context_stack',
'firehose_handler',
'post_handler_hook',
'max_span_batch_size',
'use_pattern_as_span_name',
'encoding',
])
def _get_settings_from_request(request):
"""Extracts Zipkin attributes and configuration from request attributes.
See the `zipkin_span` context in py-zipkin for more detaied information on
all the settings.
Here are the supported Pyramid registry settings:
zipkin.create_zipkin_attr: allows the service to override the creation of
Zipkin attributes. For example, if you want to deterministically
calculate trace ID from some service-specific attributes.
zipkin.transport_handler: how py-zipkin will log the spans it generates.
zipkin.stream_name: an additional parameter to be used as the first arg
to the transport_handler function. A good example is a Kafka topic.
zipkin.add_logging_annotation: if true, the outermost span in this service
will have an annotation set when py-zipkin begins its logging.
zipkin.report_root_timestamp: if true, the outermost span in this service
will set its timestamp and duration attributes. Use this only if this
service is not going to have a corresponding client span. See
https://github.com/Yelp/pyramid_zipkin/issues/68
zipkin.firehose_handler: [EXPERIMENTAL] this enables "firehose tracing",
which will log 100% of the spans to this handler, regardless of
sampling decision. This is experimental and may change or be removed
at any time without warning.
zipkin.use_pattern_as_span_name: if true, we'll use the pyramid route pattern
as span name. If false (default) we'll keep using the raw url path.
"""
settings = request.registry.settings
# Creates zipkin_attrs and attaches a zipkin_trace_id attr to the request
if 'zipkin.create_zipkin_attr' in settings:
zipkin_attrs = settings['zipkin.create_zipkin_attr'](request)
else:
zipkin_attrs = create_zipkin_attr(request)
if 'zipkin.transport_handler' in settings:
transport_handler = settings['zipkin.transport_handler']
if not isinstance(transport_handler, BaseTransportHandler):
warnings.warn(
'Using a function as transport_handler is deprecated. '
'Please extend py_zipkin.transport.BaseTransportHandler',
DeprecationWarning,
)
stream_name = settings.get('zipkin.stream_name', 'zipkin')
transport_handler = functools.partial(transport_handler, stream_name)
else:
raise ZipkinError(
"`zipkin.transport_handler` is a required config property, which"
" is missing. Have a look at py_zipkin's docs for how to implement"
" it: https://github.com/Yelp/py_zipkin#transport"
)
context_stack = _getattr_path(request, settings.get('zipkin.request_context'))
service_name = settings.get('service_name', 'unknown')
span_name = '{0} {1}'.format(request.method, request.path)
add_logging_annotation = settings.get(
'zipkin.add_logging_annotation',
False,
)
# If the incoming request doesn't have Zipkin headers, this request is
# assumed to be the root span of a trace. There's also a configuration
# override to allow services to write their own logic for reporting
# timestamp/duration.
if 'zipkin.report_root_timestamp' in settings:
report_root_timestamp = settings['zipkin.report_root_timestamp']
else:
report_root_timestamp = 'X-B3-TraceId' not in request.headers
zipkin_host = settings.get('zipkin.host')
zipkin_port = settings.get('zipkin.port', request.server_port)
firehose_handler = settings.get('zipkin.firehose_handler')
post_handler_hook = settings.get('zipkin.post_handler_hook')
max_span_batch_size = settings.get('zipkin.max_span_batch_size')
use_pattern_as_span_name = bool(
settings.get('zipkin.use_pattern_as_span_name', False),
)
encoding = settings.get('zipkin.encoding', Encoding.V1_THRIFT)
return _ZipkinSettings(
zipkin_attrs,
transport_handler,
service_name,
span_name,
add_logging_annotation,
report_root_timestamp,
zipkin_host,
zipkin_port,
context_stack,
firehose_handler,
post_handler_hook,
max_span_batch_size,
use_pattern_as_span_name,
encoding=encoding,
)
def zipkin_tween(handler, registry):
"""
Factory for pyramid tween to handle zipkin server logging. Note that even
if the request isn't sampled, Zipkin attributes are generated and pushed
into threadlocal storage, so `create_http_headers_for_new_span` and
`zipkin_span` will have access to the proper Zipkin state.
Consumes custom create_zipkin_attr function if one is set in the pyramid
registry.
:param handler: pyramid request handler
:param registry: pyramid app registry
:returns: pyramid tween
"""
def tween(request):
zipkin_settings = _get_settings_from_request(request)
tracer = get_default_tracer()
tween_kwargs = dict(
service_name=zipkin_settings.service_name,
span_name=zipkin_settings.span_name,
zipkin_attrs=zipkin_settings.zipkin_attrs,
transport_handler=zipkin_settings.transport_handler,
host=zipkin_settings.host,
port=zipkin_settings.port,
add_logging_annotation=zipkin_settings.add_logging_annotation,
report_root_timestamp=zipkin_settings.report_root_timestamp,
context_stack=zipkin_settings.context_stack,
max_span_batch_size=zipkin_settings.max_span_batch_size,
encoding=zipkin_settings.encoding,
kind=Kind.SERVER,
)
# Only set the firehose_handler if it's defined and only if the current
# request is not blacklisted. This prevents py_zipkin from emitting
# firehose spans for blacklisted paths like /status
if zipkin_settings.firehose_handler is not None and \
not should_not_sample_path(request) and \
not should_not_sample_route(request):
tween_kwargs['firehose_handler'] = zipkin_settings.firehose_handler
with tracer.zipkin_span(**tween_kwargs) as zipkin_context:
response = handler(request)
if zipkin_settings.use_pattern_as_span_name and request.matched_route:
zipkin_context.override_span_name('{} {}'.format(
request.method,
request.matched_route.pattern,
))
zipkin_context.update_binary_annotations(
get_binary_annotations(request, response),
)
if zipkin_settings.post_handler_hook:
zipkin_settings.post_handler_hook(request, response)
return response
return tween
| apache-2.0 | 5,364,765,440,840,491,000 | 38.596059 | 82 | 0.673177 | false |
alejob/mdanalysis | package/MDAnalysis/auxiliary/XVG.py | 1 | 11423 | # -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
#
# MDAnalysis --- http://www.mdanalysis.org
# Copyright (c) 2006-2016 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
"""
XVG auxiliary reader --- :mod:`MDAnalysis.auxiliary.XVG`
========================================================
xvg files are produced by Gromacs during simulation or analysis, formatted
for plotting data with Grace.
Data is column-formatted; time/data selection is enabled by providing column
indices.
Note
----
By default, the time of each step is assumed to be stored in the first column,
in units of ps.
.. autoclass:: XVGStep
:members:
XVG Readers
-----------
The default :class:`XVGReader` reads and stores the full contents of the .xvg
file on initialisation, while a second reader (:class:`XVGFileReader`) that
reads steps one at a time as required is also provided for when a lower memory
footprint is desired.
Note
----
Data is assumed to be time-ordered.
Multiple datasets, separated in the .xvg file by '&', are currently not
supported (the readers will stop at the first line starting '&').
.. autoclass:: XVGReader
:members:
.. autoclass:: XVGFileReader
:members:
.. autofunction:: uncomment
"""
from six.moves import range
import os
import numpy as np
from . import base
from ..lib.util import anyopen
def uncomment(lines):
""" Remove comments from lines in an .xvg file
Parameters
----------
lines : list of str
Lines as directly read from .xvg file
Yields
------
str
The next non-comment line, with any trailing comments removed
"""
for line in lines:
stripped_line = line.strip()
# ignore blank lines
if not stripped_line:
continue
# '@' must be at the beginning of a line to be a grace instruction
if stripped_line[0] == '@':
continue
# '#' can be anywhere in the line, everything after is a comment
comment_position = stripped_line.find('#')
if comment_position > 0 and stripped_line[:comment_position]:
yield stripped_line[:comment_position]
elif comment_position < 0 and stripped_line:
yield stripped_line
# if comment_position == 0, then the line is empty
class XVGStep(base.AuxStep):
""" AuxStep class for .xvg file format.
Extends the base AuxStep class to allow selection of time and
data-of-interest fields (by column index) from the full set of data read
each step.
Parameters
----------
time_selector : int | None, optional
Index of column in .xvg file storing time, assumed to be in ps. Default
value is 0 (i.e. first column).
data_selector : list of int | None, optional
List of indices of columns in .xvg file containing data of interest to
be stored in ``data``. Default value is ``None``.
**kwargs
Other AuxStep options.
See Also
--------
:class:`~MDAnalysis.auxiliary.base.AuxStep`
"""
def __init__(self, time_selector=0, data_selector=None, **kwargs):
super(XVGStep, self).__init__(time_selector=time_selector,
data_selector=data_selector,
**kwargs)
def _select_time(self, key):
if key is None:
# here so that None is a valid value; just return
return
if isinstance(key, int):
return self._select_data(key)
else:
raise ValueError('Time selector must be single index')
def _select_data(self, key):
if key is None:
# here so that None is a valid value; just return
return
if isinstance(key, int):
try:
return self._data[key]
except IndexError:
raise ValueError('{} not a valid index for data with {} '
'columns'.format(key, len(self._data)))
else:
return np.array([self._select_data(i) for i in key])
class XVGReader(base.AuxReader):
""" Auxiliary reader to read data from an .xvg file.
Detault reader for .xvg files. All data from the file will be read and stored
on initialisation.
Parameters
----------
filename : str
Location of the file containing the auxiliary data.
**kwargs
Other AuxReader options.
See Also
--------
:class:`~MDAnalysis.auxiliary.base.AuxReader`
Note
----
The file is assumed to be of a size such that reading and storing the full
contents is practical.
"""
format = "XVG"
_Auxstep = XVGStep
def __init__(self, filename, **kwargs):
self._auxdata = os.path.abspath(filename)
with anyopen(filename) as xvg_file:
lines = xvg_file.readlines()
auxdata_values = []
# remove comments before storing
for i, line in enumerate(uncomment(lines)):
if line.lstrip()[0] == '&':
# multiple data sets not supported; stop at the end of the first
break
auxdata_values.append([float(l) for l in line.split()])
# check the number of columns is consistent
if len(auxdata_values[i]) != len(auxdata_values[0]):
raise ValueError('Step {0} has {1} columns instead of '
'{2}'.format(i, auxdata_values[i],
auxdata_values[0]))
self._auxdata_values = np.array(auxdata_values)
self._n_steps = len(self._auxdata_values)
super(XVGReader, self).__init__(**kwargs)
def _read_next_step(self):
""" Read next auxiliary step and update ``auxstep``.
Returns
-------
AuxStep object
Updated with the data for the new step.
Raises
------
StopIteration
When end of auxiliary data set is reached.
"""
auxstep = self.auxstep
new_step = self.step + 1
if new_step < self.n_steps:
auxstep._data = self._auxdata_values[new_step]
auxstep.step = new_step
return auxstep
else:
self.rewind()
raise StopIteration
def _go_to_step(self, i):
""" Move to and read i-th auxiliary step.
Parameters
----------
i : int
Step number (0-indexed) to move to
Raises
------
ValueError
If step index not in valid range.
"""
if i >= self.n_steps or i < 0:
raise ValueError("Step index {0} is not valid for auxiliary "
"(num. steps {1})".format(i, self.n_steps))
self.auxstep.step = i-1
self.next()
return self.auxstep
def read_all_times(self):
""" Get list of time at each step.
Returns
-------
list of float
Time at each step.
"""
return self._auxdata_values[:,self.time_selector]
class XVGFileReader(base.AuxFileReader):
""" Auxiliary reader to read (step at a time) from an .xvg file.
An alternative XVG reader which reads each step from the .xvg file as
needed (rather than reading and storing all from the start), for a lower
memory footprint.
Parameters
----------
filename : str
Location of the file containing the auxiliary data.
**kwargs
Other AuxReader options.
See Also
--------
:class:`~MDAnalysis.auxiliary.base.AuxFileReader`
Note
----
The default reader for .xvg files is :class:`XVGReader`.
"""
format = 'XVG-F'
_Auxstep = XVGStep
def __init__(self, filename, **kwargs):
super(XVGFileReader, self).__init__(filename, **kwargs)
def _read_next_step(self):
""" Read next recorded step in xvg file and update ``austep``.
Returns
-------
AuxStep object
Updated with the data for the new step.
Raises
------
StopIteration
When end of file or end of first data set is reached.
"""
line = next(self.auxfile)
while True:
if not line or (line.strip() and line.strip()[0] == '&'):
# at end of file or end of first set of data (multiple sets
# currently not supported)
self.rewind()
raise StopIteration
# uncomment the line
for uncommented in uncomment([line]):
# line has data in it; add to auxstep + return
auxstep = self.auxstep
auxstep.step = self.step + 1
auxstep._data = [float(i) for i in uncommented.split()]
# see if we've set n_cols yet...
try:
auxstep._n_cols
except AttributeError:
# haven't set n_cols yet; set now
auxstep._n_cols = len(auxstep._data)
if len(auxstep._data) != auxstep._n_cols:
raise ValueError('Step {0} has {1} columns instead of '
'{2}'.format(self.step, len(auxstep._data),
auxstep._n_cols))
return auxstep
# line is comment only - move to next
line = next(self.auxfile)
def _count_n_steps(self):
""" Iterate through all steps to count total number.
Returns
-------
int
Total number of steps
"""
if not self.constant_dt:
# check if we've already iterated through to build _times list
try:
return len(self._times)
except AttributeError:
# might as well build _times now, since we'll need to iterate
# through anyway
self._times = self.read_all_times()
return len(self.read_all_times())
else:
# don't need _times; iterate here instead
self._restart()
count = 0
for step in self:
count = count + 1
return count
def read_all_times(self):
""" Iterate through all steps to build times list.
Returns
-------
list of float
Time of each step
"""
self._restart()
times = []
for step in self:
times.append(self.time)
return np.array(times)
| gpl-2.0 | -4,506,006,415,492,942,300 | 30.46832 | 82 | 0.565263 | false |
Cobaltians-Fonts/fontToCobalt | parsers/css.py | 1 | 1486 | #
# Created by Roxane P. on 15/01/2016
# Parse fonts styles.css from fontastic or icomoon
#
import tinycss
names = []
glyphs = []
prefix = ''
fontName = ''
def parseCSS(file):
parser = tinycss.make_parser('page3')
stylesheet = parser.parse_stylesheet_file(file);
global prefix
global fontName
first = True
content = False
for rule in stylesheet.rules:
# get raw glyph and name
glyph = rule.declarations
name = rule.selector.as_css().split(':', 1)[0].replace('.', '')
if first == True:
fontName = glyph[0].value.as_css().replace('\'', '').replace('"', '') # set fontName
first = False
else:
if prefix == '': # we dont have the prefix yet
tmp = rule.selector.as_css().split('-', 1)[0].replace('.', '')
if tmp[0] != '[' and tmp != '':
prefix = tmp # set the prefix we are looking for
if (glyph[0].value.as_css()[1] == '\\'):
content = True # font selector with needed content appeared
if content == True:
glyph = glyph[0].value.as_css().replace('"', '')
glyphs.append(glyph.lower()) # set a glyph in glyphs
if name[0] != '[':
names.append(name.lower()) # set a name in names
def get_names():
return names;
def get_glyphs():
return glyphs;
def get_fontName():
return fontName;
def get_prefix():
return prefix;
| mit | 1,897,860,228,959,300,000 | 30.617021 | 96 | 0.541723 | false |
rosedu/I.GameBot | snake_maze/game_bots/random_bot_02_python/snakemaze_play_turn.py | 1 | 1318 | import random
from copy import deepcopy
DIRECTIONS = ['right', 'left', 'up', 'down']
def locate_free_square(width, height, obstacles):
square = {'x': 0, 'y': 0}
while square in obstacles:
square['x'] = random.choice( range(0,width) )
square['y'] = random.choice( range(0,height) )
return square
def play_turn(width, height, obstacles):
snake = {
'starts_at': locate_free_square(width, height, obstacles),
'shape_segments': []
}
for direction in DIRECTIONS:
next_square = deepcopy( snake['starts_at'] )
# Python doesn't have case statements,
# so the following few IFs are necessarily ugly
if direction == 'right':
next_square['x'] += 1
if direction == 'left':
next_square['x'] -= 1
if direction == 'up':
next_square['y'] -= 1
if direction == 'down':
next_square['y'] += 1
next_square_out_of_bounds = \
next_square['x'] > width - 1 or \
next_square['x'] < 0 or \
next_square['y'] < 0 or \
next_square['y'] > height - 1
if next_square not in obstacles and not next_square_out_of_bounds:
snake['shape_segments'].append(direction)
break
return snake
| agpl-3.0 | -324,057,517,764,576,450 | 22.963636 | 74 | 0.54173 | false |
opevolution/pycrackcaptcha | pycrackcaptcha/data.py | 1 | 1273 | # -*- coding: utf-8 -*-
# vim: set expandtab tabstop=4 shiftwidth=4:
###############################################################
# #
# Alexandre Defendi - 01/04/2014 #
# #
# Módulo Básico - PyCrackCaptcha #
# #
# #
# #
# #
# #
###############################################################
class CCaptchaData(object):
""" Interface para Implementações de Cracks """
def __init__(self, **kwargs):
self._arquivo = kwargs.pop('arquivo', False)
def _get_arquivo(self):
if self._arquivo is not None:
return "%s" % self._arquivo
else:
return False
def _set_arquivo(self, val):
if val:
self._arquivo = val
else:
self._arquivo = False
arquivo = property(_get_arquivo, _set_arquivo) | bsd-3-clause | -5,452,015,869,434,828,000 | 37.484848 | 63 | 0.289204 | false |
mariusbaumann/pyload | module/plugins/hoster/ZeveraCom.py | 1 | 1124 | # -*- coding: utf-8 -*-
from module.plugins.internal.MultiHoster import MultiHoster, create_getInfo
class ZeveraCom(MultiHoster):
__name__ = "ZeveraCom"
__type__ = "hoster"
__version__ = "0.25"
__pattern__ = r'http://(?:www\.)?zevera\.com/.+'
__description__ = """Zevera.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg", "zoidberg@mujmail.cz")]
def handlePremium(self):
if self.account.getAPIData(self.req, cmd="checklink", olink=self.pyfile.url) != "Alive":
self.fail(_("Offline or not downloadable"))
header = self.account.getAPIData(self.req, just_header=True, cmd="generatedownloaddirect", olink=self.pyfile.url)
if not "location" in header:
self.fail(_("Unable to initialize download"))
self.link = header['location']
def checkFile(self):
super(ZeveraCom, self).checkFile()
if self.checkDownload({"error": 'action="ErrorDownload.aspx'}) is "error":
self.fail(_("Error response received - contact Zevera support"))
getInfo = create_getInfo(ZeveraCom)
| gpl-3.0 | -2,630,775,124,068,375,600 | 30.222222 | 121 | 0.617438 | false |
joemicro/Manufacturing | findform.py | 1 | 31523 | import os
import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from sqlalchemy import *
from sqlalchemy.orm import *
from databaseschema import *
from genericdelegates import *
from functions import *
import modelsandviews
import ui_forms.ui_findform
import receiveform
import batchform
import productionform
import inventoryadjform
import reporting
localTITLE = 'Find'
RECEIVE, BATCH, PRODUCTION, ADJUSTMENT, PREP = range(5)
class FilterList(object):
def __init__(self, filter, criteria, setTo):
self.filter = filter
self.criteria = criteria
self.setTo = setTo
class FilterModel(QAbstractTableModel):
def __init__(self, parent=None):
super(FilterModel, self).__init__(parent)
self.records = []
def rowCount(self, index=QModelIndex()):
return len(self.records)
def columnCount(self, index=QModelIndex()):
return 3
def data(self, index, role=Qt.DisplayRole):
if not index.isValid() or not (0 <= index.row() < len(self.records)):
return QVariant()
record = self.records[index.row()]
column = index.column()
if role == Qt.DisplayRole:
if column == 0:
return QVariant(record.filter)
elif column == 1:
return QVariant(record.criteria)
elif column == 2:
return QVariant(record.setTo)
return QVariant()
def setData(self, index, value, role=Qt.EditRole):
if index.isValid() and role == Qt.EditRole:
record = self.records[index.row()]
column = index.column()
if column == 0:
record.filter = value.toString()
elif column == 1:
record.criteria = value.toString()
elif column == 2 :
record.setTo = value.toString()
self.emit(SIGNAL("dataChanged(QModelIndex, QModelIndex)"), index, index)
return True
return False
def insertRows(self, position, object, rows=1, index=QModelIndex()):
self.beginInsertRows(QModelIndex(), position, position + rows - 1)
for row in range(rows):
self.records.insert(position + row + 1, object)
self.endInsertRows()
return True
def removeRows(self, position, rows=1, index=QModelIndex()):
self.beginRemoveRows(QModelIndex(), position, position + rows - 1)
self.records = self.records[:position] + self.records[position + rows:]
self.endRemoveRows()
return True
def getFilterCriteria(self):
records_ = []
for rec in self.records:
crit = rec.filter
records_ += [str(crit)]
return records_
def clear(self):
self.beginResetModel()
self.items = []
self.items.append(ItemAssembly())
self.endResetModel()
#==================================================================
### Form setup ==============
class FindForm(QDialog, ui_forms.ui_findform.Ui_FindForm):
### Initializer ==============
def __init__(self, supplierModel, parent=None):
super(FindForm, self).__init__(parent)
self.setupUi(self)
self.tabWidget.setCurrentIndex(0)
self.session = Session()
self.query = None
self.model = None
self.reportName = QString()
self.fieldList = []
self.columnsToTotal = []
self.proxyModel = QSortFilterProxyModel()
self.myParent = parent
## == Standard tab GUI setup ==
journalList = QStringList()
journalList << 'Receive' << 'Batch' << 'Production' << 'Adjustment' << 'Preparation'
self.journal_combo.addItems(journalList)
self.supCom.setVisible(False)
self.supplierModel = supplierModel
self.supplier_combo = modelsandviews.SupplierComboBox(self.supplierModel)
self.supplier_combo.setMinimumSize(QSize(218, 25))
self.supplier_combo.setMaximumSize(QSize(218, 25))
self.gridLayout.addWidget(self.supplier_combo, 2, 1, 1, 2)
self.batchDesc_lineEdit = QLineEdit()
self.batchDesc_lineEdit.setMinimumSize(QSize(218, 25))
self.batchDesc_lineEdit.setMaximumSize(QSize(218, 85))
self.gridLayout.addWidget(self.batchDesc_lineEdit, 2, 1, 1, 2)
self.batchDesc_lineEdit.setVisible(False)
validator = QDoubleValidator()
validator.StandardNotation
self.amountHi_lineEdit.setValidator(validator)
self.amountLow_lineEdit.setValidator(validator)
self.dtcom.setVisible(False)
self.dateRange_combo = modelsandviews.DateRangeComboBox(self.layoutWidget)
self.dateRange_combo.setCurrentIndex(0)
self.dateRange_combo.setMinimumSize(QSize(96, 25))
self.dateRange_combo.setMaximumSize(QSize(96, 25))
self.gridLayout.addWidget(self.dateRange_combo, 4, 1, 1, 1)
self.dateLow_dateEdit.setDate(QDate.currentDate())
self.dateHi_dateEdit.setDate(QDate.currentDate())
self.results_tableView.setSelectionMode(QTableView.SingleSelection)
self.results_tableView.setSelectionBehavior(QTableView.SelectRows)
## == Detail tab GUI setup ==
self.filter_stackedWidget.setCurrentIndex(0)
crtList = QStringList()
crtList << 'Journal Number' << 'Journal ID' << 'Supplier' << 'Items' << 'Description' << 'Journal Type' \
<< 'Item Type' << 'Date' << 'Date Modified'
crtView = self.criteriaList_listWidget
crtView.addItems(crtList)
crtView.setEditTriggers(QListView.NoEditTriggers)
self.filterModel = FilterModel()
fltView = self.criteria_tableView
fltView.setModel(self.filterModel)
fltView.hideColumn(0)
fltView.horizontalHeader().setStretchLastSection(True)
fltView.horizontalHeader().setVisible(False)
fltView.verticalHeader().setVisible(False)
fltView.setSelectionMode(QTableView.SingleSelection)
fltView.setSelectionBehavior(QTableView.SelectRows)
fltView.resizeColumnsToContents()
self.dateLowFilter_dateEdit.setDate(QDate.currentDate())
self.dateHiFilter_dateEdit.setDate(QDate.currentDate())
self.modDateLowFilter_dateEdit.setDate(QDate.currentDate())
self.modDateHiFilter_dateEdit.setDate(QDate.currentDate())
## == stackWidget items setup ==
self.journalStart_lineEdit.setValidator(validator)
self.journalEnd_lineEdit.setValidator(validator)
self.supplier_list = modelsandviews.SupplierListModel()
self.supplierFilter_tableView.setModel(self.supplier_list)
supplier_view = self.supplierFilter_tableView
supplier_view.hideColumn(1)
supplier_view.setColumnWidth(0, 25)
supplier_view.verticalHeader().setVisible(False)
supplier_view.setSelectionMode(QTableView.SingleSelection)
supplier_view.setSelectionBehavior(QTableView.SelectRows)
self.ItemList = modelsandviews.UnionItemListModel()
itemView = self.itemFilter_tableView
itemView.setModel(self.ItemList)
itemView.hideColumn(1)
itemView.verticalHeader().setVisible(False)
itemView.setSelectionMode(QTableView.SingleSelection)
itemView.setSelectionBehavior(QTableView.SelectRows)
itemView.resizeColumnsToContents()
self.journalFilter_combo.addItems(journalList)
self.journalFilter_combo.removeItem(1)
self.journalFilter_combo.setCurrentIndex(-1)
self.bothItemTypeFilter_checkBox.setChecked(True)
self.dtfilcom.setVisible(False)
self.dateFilter_combo = modelsandviews.DateRangeComboBox(self.layoutWidget6)
self.dateFilter_combo.setMinimumSize(QSize(96, 25))
self.dateFilter_combo.setMaximumSize(QSize(96, 25))
self.gridLayout_7.addWidget(self.dateFilter_combo, 1, 1, 1, 1)
self.dtfilcom_2.setVisible(False)
self.modfiedDateFilter_combo = modelsandviews.DateRangeComboBox(self.layoutWidget_10)
self.modfiedDateFilter_combo.setMinimumSize(QSize(96, 25))
self.modfiedDateFilter_combo.setMaximumSize(QSize(96, 25))
self.gridLayout_8.addWidget(self.modfiedDateFilter_combo, 1, 1, 1, 1)
self.amountLow_lineEdit.editingFinished.connect(self.standardAmount)
self.amountHi_lineEdit.editingFinished.connect(self.standardAmount)
self.dateRange_combo.currentIndexChanged.connect(lambda:
self.dateRangeSelection(self.dateRange_combo,
self.dateLow_dateEdit,
self.dateHi_dateEdit))
self.dateFilter_combo.currentIndexChanged.connect(lambda:
self.dateRangeSelection(self.dateFilter_combo,
self.dateLowFilter_dateEdit,
self.dateHiFilter_dateEdit))
self.modfiedDateFilter_combo.currentIndexChanged.connect(lambda:
self.dateRangeSelection(self.modfiedDateFilter_combo,
self.modDateLowFilter_dateEdit,
self.modDateHiFilter_dateEdit))
self.connect(crtView, SIGNAL('currentRowChanged(int)'),
self.filter_stackedWidget, SLOT('setCurrentIndex(int)'))
self.journal_combo.currentIndexChanged.connect(self.layoutChange)
self.findButton.clicked.connect(self.find)
self.editButton.clicked.connect(self.edit)
self.results_tableView.doubleClicked.connect(self.edit)
self.reportButton.clicked.connect(self.printReport)
self.clearButton.clicked.connect(self.clear)
self.closeButton.clicked.connect(self.reject)
## == Setup stackedWidget operations ==
self.journalRef_lineEdit.editingFinished.connect(self.journalNum)
self.journalStart_lineEdit.editingFinished.connect(self.journalIDRange)
self.journalEnd_lineEdit.editingFinished.connect(self.journalIDRange)
self.itemDesc_lineEdit.editingFinished.connect(self.itemDesc)
self.journalFilter_combo.currentIndexChanged.connect(self.journalType)
self.rmFilter_checkBox.stateChanged.connect(self.itemType)
self.fgFilter_checkBox.stateChanged.connect(self.itemType)
self.dateLowFilter_dateEdit.dateChanged.connect(self.dateRange)
self.dateHiFilter_dateEdit.dateChanged.connect(self.dateRange)
self.modDateLowFilter_dateEdit.dateChanged.connect(self.modDateRange)
self.modDateHiFilter_dateEdit.dateChanged.connect(self.modDateRange)
self.removeFilter_button.clicked.connect(self.removeFilter)
itemView.clicked.connect(self.checkItem)
supplier_view.clicked.connect(self.checkItem)
self.setWindowTitle(localTITLE)
def reject(self):
QDialog.reject(self)
self.myParent.formClosed()
def standardAmount(self):
amount_low = str(self.amountLow_lineEdit.text())
amount_hi = str(self.amountHi_lineEdit.text())
if not amount_low:
return
amount_low = float(amount_low)
if not amount_hi:
return
amount_hi = float(amount_hi)
if amount_hi < amount_low:
self.amountLow_lineEdit.setText(str(amount_hi))
self.amountHi_lineEdit.setText(str(amount_low))
def standardDate(self):
fromDate = self.dateLow_dateEdit.date()
fromDate = fromDate.toPyDate()
toDate = self.dateHi_dateEdit.date()
toDate = toDate.toPyDate()
if toDate < fromDate:
self.dateLow_dateEdit.setDate(toDate)
self.dateHi_dateEdit.setDate(fromDate)
## == setup detail filter function calls
def checkItem(self, index):
model = self.supplier_list
if self.sender() == self.itemFilter_tableView:
model = self.ItemList
row = index.row()
i = model.index(row, 0)
if index.model().data(i, Qt.DisplayRole).toString() != 'P':
model.setData(i, QVariant('P'), role=Qt.EditRole)
else:
model.setData(i, QVariant(), role=Qt.EditRole)
def removeFilter(self):
row = self.criteria_tableView.currentIndex().row()
self.filterModel.removeRows(row)
def dateRangeSelection(self, rangeCombo, dateFrom, dateTo):
dateFrom.blockSignals(True)
dateTo.blockSignals(True)
selection = rangeCombo.currentText()
date_from, date_to = dateRange(selection)
dateFrom.setDate(date_from)
dateTo.setDate(date_to)
dateFrom.blockSignals(False)
dateTo.blockSignals(False)
dateFrom.emit(SIGNAL('dateChanged(QDate)'), date_from)
dateTo.emit(SIGNAL('dateChanged(QDate)'), date_to)
def journalNum(self):
fType = 'Journal Num'
start = str(self.journalRef_lineEdit.text())
self.updateFilterModel(fType, 'JournalHeader.journal_no==%s' % start, start)
def journalIDRange(self):
fType = 'Journal ID'
start = str(self.journalStart_lineEdit.text())
if not start:
return
start = int(start)
end = str(self.journalEnd_lineEdit.text())
if not end:
return
end = int(end)
if end < start:
self.journalStart_lineEdit.setText(str(end))
self.journalEnd_lineEdit.setText(str(start))
self.journalIDRange()
return
self.updateFilterModel(fType, 'JournalHeader.journal_id.between(%i,%i)' % (start, end), 'Between(%i,%i)' % (start, end))
def itemDesc(self):
fType = 'Item Description'
desc = str(self.itemDesc_lineEdit.text())
self.updateFilterModel(fType, 'unionQuery.c.itemDesc.ilike("%%%s%%"))' % (desc, desc), desc)
def journalType(self):
fType = 'Journal Type'
jType = str(self.journalFilter_combo.currentText())
crit = 'JournalHeader.journal_type=="%s"' % jType
if jType == 'Receive':
crit = 'or_(JournalHeader.journal_type=="Bill", JournalHeader.journal_type=="Credit")'
self.updateFilterModel(fType, crit, jType)
def itemType(self):
fType = 'Item Type'
rmdType = self.rmFilter_checkBox.isChecked()
fgdType = self.fgFilter_checkBox.isChecked()
if rmdType == 1:
self.updateFilterModel(fType, 'unionQuery.c.itemType=="RMD"', 'Raw Materials')
elif fgdType == 1:
self.updateFilterModel(fType, 'unionQuery.c.itemType=="FGD"', 'Finished Goods')
def dateRange(self):
fType = 'Date range'
fromDate = self.dateLowFilter_dateEdit.date()
fromDate = fromDate.toPyDate()
toDate = self.dateHiFilter_dateEdit.date()
toDate = toDate.toPyDate()
if toDate < fromDate:
self.dateLowFilter_dateEdit.setDate(toDate)
self.dateHiFilter_dateEdit.setDate(fromDate)
self.dateRange()
return
self.updateFilterModel(fType, 'JournalHeader.journal_date.between("%s", "%s")' % (fromDate, toDate),
'Between(%s, %s)' % (fromDate, toDate))
def modDateRange(self):
fType = 'Modified Range'
fromDate = self.modDateLowFilter_dateEdit.date()
fromDate = fromDate.toPyDate()
toDate = self.modDateHiFilter_dateEdit.date()
toDate = toDate.toPyDate()
if toDate < fromDate:
self.modDateLowFilter_dateEdit.setDate(toDate)
self.modDateHiFilter_dateEdit.setDate(fromDate)
self.modDateRange()
return
self.updateFilterModel(fType, 'JournalHeader.modified_date.between("%s", "%s")' % (fromDate, toDate),
'Between(%s, %s)' % (fromDate, toDate))
def updateFilterModel(self, fType, filter, setTo):
index = self.filterModel.index(0, 1)
m = self.filterModel.match(index, Qt.DisplayRole, QVariant(fType), 1)
if len(m) <= 0:
position = self.ItemList.rowCount() + 1
self.filterModel.insertRows(position, FilterList(QString(filter), QString(fType), QString(setTo)))
else:
for i in m:
row = i.row()
index = self.filterModel.index(row, 0)
self.filterModel.setData(index, QVariant(filter), Qt.EditRole)
index = self.filterModel.index(row, 2)
self.filterModel.setData(index, QVariant(setTo), Qt.EditRole)
self.criteria_tableView.resizeColumnsToContents()
## == Form layout setup
def layoutChange(self):
jType = self.journal_combo.currentIndex()
if jType == RECEIVE:
self.supplier_combo.setVisible(True)
self.supplier_label.setVisible(True)
self.amount_label.setVisible(True)
self.amountLow_lineEdit.setVisible(True)
self.amount_and_label.setVisible(True)
self.amountHi_lineEdit.setVisible(True)
self.batchDesc_lineEdit.setVisible(False)
self.supplier_label.setText('Supplier')
elif jType == BATCH:
self.supplier_combo.setVisible(False)
self.amount_label.setVisible(False)
self.amountLow_lineEdit.setVisible(False)
self.amount_and_label.setVisible(False)
self.amountHi_lineEdit.setVisible(False)
self.batchDesc_lineEdit.setVisible(True)
self.supplier_label.setText('Description')
elif jType in (PRODUCTION, ADJUSTMENT, PREP):
self.supplier_combo.setVisible(False)
self.amount_label.setVisible(False)
self.amountLow_lineEdit.setVisible(False)
self.amount_and_label.setVisible(False)
self.amountHi_lineEdit.setVisible(False)
self.batchDesc_lineEdit.setVisible(False)
self.supplier_label.setVisible(False)
def getDate(self):
if self.dateRange_combo.currentText() == 'All':
return ("", "")
else:
date_low = self.dateLow_dateEdit.date()
date_low = date_low.toPyDate()
date_hi = self.dateHi_dateEdit.date()
date_hi = date_hi.toPyDate()
dateTupple = (date_low, date_hi)
return dateTupple
## == Form operations
def find(self):
if self.tabWidget.currentIndex() == 0:
self.standardFind()
elif self.tabWidget.currentIndex() == 1:
self.detailFind()
def standardFind(self):
jType = self.journal_combo.currentIndex()
journal_no = str(self.number_lineEdit.text())
supplier_id = dLookup(Suppliers.supplier_id, Suppliers.supplier_name==str(self.supplier_combo.currentText()))
amount_low = str(self.amountLow_lineEdit.text())
amount_hi = str(self.amountHi_lineEdit.text())
batch_desc = str(self.batchDesc_lineEdit.text())
date_low, date_hi = self.getDate()
if jType == RECEIVE:
journalNo_filter = ReceiveHeader.journal_no.ilike('%%%s%%' % journal_no) \
if journal_no else ""
supplierId_filter = ReceiveHeader.supplier_id == supplier_id \
if supplier_id else ""
if amount_low and amount_hi:
amount_low = int(amount_low)
amount_hi = int(amount_hi)
amount_filter = ReceiveHeader.journal_total.between(amount_low, amount_hi)
elif not amount_low or not amount_hi:
amount_filter = ""
if self.dateRange_combo.currentText() == 'All':
date_filter = ""
elif not self.dateRange_combo.currentText() == 'All':
date_filter = ReceiveHeader.journal_date.between(date_low, date_hi)
self.query = self.session.query(ReceiveHeader).filter(or_(ReceiveHeader.journal_type=='Bill', ReceiveHeader.journal_type=='Credit')) \
.filter(journalNo_filter).filter(supplierId_filter).filter(amount_filter).filter(date_filter)
self.fieldList = [('ID', 'journal_id', 50, 'string'), ('Type', 'journal_type', 50, 'string'), ('No', 'journal_no', 75, 'string'),
('Date', 'journal_date', 150, 'date'), ('Supplier', 'supplier_name', 150, 'string'),
('Amount', 'journal_total', 50, 'number'), ('Modified', 'modified_date', 150, 'date'),
('Memo', 'journal_memo', 150, 'string')]
self.reportName = 'Receiving List'
self.columnsToTotal = [(5,)]
elif jType == BATCH:
journalNo_filter = or_(BatchHeader.batch_id==journal_no, BatchHeader.base_no==journal_no) \
if journal_no else ""
batchDesc_filter = BatchHeader.base_desc == batch_desc \
if batch_desc else ""
if self.dateRange_combo.currentText() == 'All':
date_filter = ""
elif not self.dateRange_combo.currentText() == 'All':
date_filter = BatchHeader.batch_date.between(date_low, date_hi)
self.query = self.session.query(BatchHeader).filter(journalNo_filter).filter(batchDesc_filter).filter(date_filter)
self.fieldList = [('ID', 'batch_id', 50, 'string'), ('Base No.', 'base_no', 50, 'string'), ('Date', 'batch_date', 75, 'date'),
('Journal', 'journal_id', 50, 'string'), ('Memo', 'batch_memo', 150, 'string')]
self.reportName = 'Batch List'
self.columnsToTotal = []
elif jType == PRODUCTION:
journalNo_filter = or_(ProductionHeader.journal_no.ilike('%%%s%%' % journal_no),
ProductionHeader.journal_id == journal_no) \
if journal_no else ""
if self.dateRange_combo.currentText() == 'All':
date_filter = ""
elif not self.dateRange_combo.currentText() == 'All':
date_filter = ProductionHeader.journal_date.between(date_low, date_hi)
self.query = self.session.query(ProductionHeader).filter(journalNo_filter).filter(date_filter)
self.fieldList = [('ID', 'journal_id', 50, 'string'), ('Production No', 'journal_id', 50, 'string'),
('Ref No', 'journal_no', 50, 'string'), ('Date', 'journal_date', 75, 'date'),
('Modified', 'modified_date', 150, 'date'), ('Memo', 'journal_memo', 150, 'string')]
self.reportName = 'Production List'
self.columnsToTotal = []
elif jType == ADJUSTMENT:
journalNo_filter = or_(AdjustmentHeader.journal_no.ilike('%%%s%%' % journal_no),
AdjustmentHeader.journal_id == journal_no) \
if journal_no else ""
if self.dateRange_combo.currentText() == 'All':
date_filter = ""
elif not self.dateRange_combo.currentText() == 'All':
date_filter = AdjustmentHeader.journal_date.between(date_low, date_hi)
self.query = self.session.query(AdjustmentHeader).filter(journalNo_filter).filter(date_filter)
self.fieldList = [('ID', 'journal_id', 0, 'string'), ('No', 'journal_id', 50 ,'string'), ('Date', 'journal_date', 75, 'date'),
('Modified', 'modified_date', 150, 'date'), ('Memo', 'journal_memo', 150, 'string')]
self.reportName = 'Adjustment List'
self.columnsToTotal = []
elif jType == PREP:
journalNo_filter = PrepHeader.prep_id.ilike('%%%s%%' % journal_no) if journal_no else ""
if self.dateRange_combo.currentText() == 'All':
date_filter = ""
elif not self.dateRange_combo.currentText() == 'All':
date_filter = AdjustmentHeader.journal_date.between(date_low, date_hi)
self.query = self.session.query(PrepHeader).filter(journalNo_filter).filter(date_filter)
self.fieldList = [('ID', 'prep_id', 0, 'string'), ('Date', 'prep_date', 75, 'date'), ('Memo', 'prep_memo', 150, 'string')]
self.reportName = 'Preparation List'
self.columnsToTotal = []
self.populateView()
def detailFind(self):
rmd_list = self.session.query(RMD.journal_id, (RMD.bom_id).label('itemID'), (RMD.total / RMD.qty).label('rmdCost'),
(BOM.bom_no).label('itemNo'), (BOM.bom_desc).label('itemDesc'),
BOM.supplier_id.label('supplierId'), JournalHeader.journal_id, JournalHeader.journal_no,
JournalHeader.journal_date, JournalHeader.journal_type, literal_column('"RMD"').label('itemType')) \
.join(BOM).join(JournalHeader)
fgd_list = self.session.query(FGD.journal_id, (FGD.item_id).label('itemID'), FGD.cost, (Items.item_no).label('itemNo'),
(Items.item_desc).label('itemDesc'), literal_column('"AW Products"').label('supplierId'),
JournalHeader.journal_id, JournalHeader.journal_no, JournalHeader.journal_date,
JournalHeader.journal_type, literal_column('"FGD"').label('itemType')) \
.join(Items).join(JournalHeader)
unionQuery = rmd_list.union(fgd_list).subquery()
query = self.session.query(unionQuery).join(JournalHeader)
itemCrit = self.ItemList.getList()
itemLine = ''.join(i for i in itemCrit)[:-2]
itemFilter = "or_(%s)" % itemLine
query = query.filter(eval(itemFilter)) if itemCrit else query
supCrit = self.supplier_list.getList()
supLine = ''.join(i for i in supCrit)[:-2]
supFilter = "or_(%s)" % supLine
query = query.filter(eval(supFilter)) if supCrit else query
critList = self.filterModel.getFilterCriteria()
for crit in critList:
query = query.filter(eval(crit))
self.fieldList = [('ID', 'journal_id', 25, 'string'), ('Journal', 'journal_type', 70, 'string'), ('No', 'journal_no', 75, 'string'),
('Date', 'journal_date', 75, 'date'), ('Item', 'item_no', 50, 'string'),
('Description', 'item_desc', 200, 'string'), ('Cost', 'item_cost', 50, 'number')]
self.reportName = 'Detail Find List'
self.columnsToTotal = []
self.query = []
for i in query:
journal_id = i[0]
item_no = i[3]
item_desc = i[4]
item_cost = nonZero(i[2], 0)
journal_no = i[7]
journal_date = i[8]
journal_type = i[9]
self.query += [DetailFind(journal_id, item_no, item_desc, item_cost, journal_no, journal_date, journal_type)]
self.populateView()
def populateView(self):
self.model = modelsandviews.FindResultModel(self.fieldList)
self.model.load(self.query)
self.proxyModel.setSourceModel(self.model)
self.results_tableView.setModel(self.proxyModel)
self.results_tableView.setSortingEnabled(True)
self.v_results_label.setText('%s - Results' % len(self.model.results))
self.resizeView()
def resizeView(self):
self.results_tableView.resizeColumnsToContents()
self.results_tableView.horizontalHeader().setStretchLastSection(True)
# self.results_tableView.setColumnHidden(0, True)
def edit(self):
if not self.model:
return
jType = self.journal_combo.currentIndex()
row = self.results_tableView.currentIndex().row()
recordIndex = self.proxyModel.index(row, 0)
recordID = self.proxyModel.data(recordIndex).toInt()[0]
self.editTransaction(jType, recordID)
def editTransaction(self, jType, recordID):
if jType == RECEIVE:
form = self.myParent.receiveForm()
form.recall(recordID)
elif jType == BATCH:
form = self.myParent.batchForm()
form.recall(1, recordID)
elif jType == PRODUCTION:
form = self.myParent.productionForm()
form.recall(recordID)
elif jType == ADJUSTMENT:
form = self.myParent.invAdjustment()
form.recall(recordID)
elif jType == PREP:
form = self.myParent.prodprepForm()
form.recall(recordID)
def clear(self):
widgets = self.findChildren(QWidget)
for widget in widgets:
if isinstance(widget, (QLineEdit, QTextEdit)):
widget.clear()
elif isinstance(widget, QComboBox):
widget.setCurrentIndex(-1)
elif isinstance(widget, QCheckBox):
widget.setChecked(False)
elif isinstance(widget, QLabel):
if widget.objectName()[:2] == 'v_':
widget.clear()
self.dateRange_combo.setCurrentIndex(0)
self.dateFilter_combo.setCurrentIndex(0)
if self.model is not None:
self.model.clear()
def printReport(self):
if not self.model:
return
reportModel = reporting.ReportModel('Simple List')
self.refreshReport(reportModel)
report_type = 'trans_header_report' if self.tabWidget.currentIndex() == 0 else 'trans_detail_report'
self.myParent.reportForm(reportModel, self, report_type)
def refreshReport(self, model, report=None):
fromDate, toDate = self.getDate()
if fromDate and toDate:
period = 'From %s To %s.' % (fromDate, toDate)
elif toDate:
period = 'As of %s.' % toDate
else:
period = 'All available dates.'
model.load(self.reportName, period, self.query, self.fieldList, self.columnsToTotal)
def formClosed(self):
self.myParent.formClosed()
if __name__ == '__main__':
app = QApplication(sys.argv)
setupDatabase("Production.sqlite")
supModel = modelsandviews.SupplierModel()
# itmModel = modelsandviews.ItemModel()
# bsModel = modelsandviews.BaseListModel()
form = FindForm(supModel)
form.show()
app.exec_()
| mit | 5,236,492,352,790,907,000 | 44.886463 | 146 | 0.586207 | false |
sharmaking/PairTradeBTS | multipleStrategy/statisticalArbitrageMultiple.py | 1 | 4640 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#statisticalArbitrageMultiple.py
import baseMultiple
import numpy, copy, csv
class CStatisticalArbitrageMultiple(baseMultiple.CBaseMultiple):
#------------------------------
#继承重载函数
#------------------------------
#自定义初始化函数
def customInit(self):
self.name = "statisticalArbitrageMultiple"
self.parameters = []
self.loadPara()
#行情数据触发函数
def onRtnMarketData(self, data):
#计算S
self.countS(data)
pass
def dayEnd(self):
pass
#自动保存缓存触发函数
def autosaveCache(self):
self.saveCache(parameters = self.parameters)
pass
#----------------------
#实现函数体
#----------------------
def loadPara(self):
reader = csv.reader(open("filtPara.csv"))
for line in reader:
self.parameters.append({
"stocks" : [line[0][:6], line[0][7:13]],
"Beta" : float(line[1]),
"Mean" : float(line[2]),
"STD" : float(line[3]),
"OPEN" : float(line[5]),
"CLOSE" : float(line[6]),
"ODD" : float(line[7]),
"staute" : 0,
"tradeType" : [None, None],
"price" : [0,0],
"S" : (0,0)
})
def countS(self, data):
for parameter in self.parameters:
Pa = self.getStockPrice(parameter["stocks"][0])
Pb = self.getStockPrice(parameter["stocks"][1])
if Pa and Pb:
St = numpy.log(Pa) - parameter["Beta"]*numpy.log(Pb)
S = (St - parameter["Mean"])/parameter["STD"]
parameter["price"] = [Pa, Pb]
parameter["S"] = (data["dateTime"], S)
#self.sendS(S, parameter["stocks"][0], data["dateTime"], Pa, Pb)
self.countTrade(parameter, S)
def getStockPrice(self, stockCode):
if self.actuatorDict[stockCode].signalObjDict["baseSignal"].MDList:
return copy.copy(self.actuatorDict[stockCode].signalObjDict["baseSignal"].MDList[-1]["close"])
return None
def countTrade(self, parameter, S):
if parameter["staute"] == 0: #还没开仓
if S > parameter["OPEN"]:
self.openTrade(parameter, True) #正
elif S < -parameter["OPEN"]:
self.openTrade(parameter, False) #反
elif parameter["staute"] == 1: #已经开仓
if parameter["tradeType"][0] == "Sell": #正
if S < parameter["CLOSE"]: #平
self.closeTrade(parameter)
if S > parameter["ODD"]: #止损
self.stopLossTrade(parameter)
elif parameter["tradeType"][0] == "Buy": #反
if S > -parameter["CLOSE"]: #平
self.closeTrade(parameter)
if S < -parameter["ODD"]: #止损
self.stopLossTrade(parameter)
if parameter["staute"] != 2:
if S > parameter["ODD"] or S < -parameter["ODD"]:
self.stopLossTrade(parameter)
def sendS(self, S, stockCode, dateTime, Pa, Pb):
self.sendMessageToClient("0_%s_%s_%s_%f_%f"%(stockCode, str(S)[:6], dateTime, Pa, Pb))
def openTrade(self, parameter, isTrue):
parameter["staute"] = 1
if isTrue: #正
parameter["tradeType"] = ["Sell", "Buy"]
else: #反
parameter["tradeType"] = ["Buy", "Sell"]
self.sendMessageToClient("%s-%s,%s,Open:,%s,%s,%s, %s,%s,%s"%(
parameter["stocks"][0], parameter["stocks"][1], str(parameter["S"][0]),
parameter["stocks"][0], parameter["tradeType"][0], parameter["price"][0],
parameter["stocks"][1], parameter["tradeType"][1], parameter["price"][1]))
def closeTrade(self, parameter):
self.sendMessageToClient("%s-%s,%s,Close:,%s,%s,%s, %s,%s,%s"%(
parameter["stocks"][0], parameter["stocks"][1], str(parameter["S"][0]),
parameter["stocks"][0], parameter["tradeType"][1], parameter["price"][0],
parameter["stocks"][1], parameter["tradeType"][0], parameter["price"][1]))
parameter["staute"] = 0
parameter["tradeType"] = [None, None]
def stopLossTrade(self, parameter):
self.sendMessageToClient("%s-%s,%s,StopLoss:,%s,%s,%s, %s,%s,%s"%(
parameter["stocks"][0], parameter["stocks"][1], str(parameter["S"][0]),
parameter["stocks"][0], parameter["tradeType"][1],parameter["price"][0],
parameter["stocks"][1], parameter["tradeType"][0],parameter["price"][1]))
parameter["staute"] = 2
parameter["tradeType"] = [None, None]
def exceptionTrade(self, parameter):
self.sendMessageToClient("%s-%s,%s,StopLoss:,%s,%s,%s, %s,%s,%s"%(
parameter["stocks"][0], parameter["stocks"][1], str(parameter["S"][0]),
parameter["stocks"][0], parameter["tradeType"][1],parameter["price"][0],
parameter["stocks"][1], parameter["tradeType"][0],parameter["price"][1]))
parameter["staute"] = 2
parameter["tradeType"] = [None, None]
def sendMessageToClient(self, string):
print self.MDList[-1]["dateTime"], string
logFile = open("tradePointsFinal.csv", "a")
content = string + "\n"
logFile.write(content)
logFile.close()
pass
| mit | -3,109,636,028,187,212,000 | 36.371901 | 97 | 0.622512 | false |
wackou/bts_tools | bts_tools/cmdline.py | 1 | 24237 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# bts_tools - Tools to easily manage the bitshares client
# Copyright (c) 2014 Nicolas Wack <wackou@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from os.path import join, dirname, exists, islink, expanduser, basename
from argparse import RawTextHelpFormatter
from contextlib import suppress
from pathlib import Path
from ruamel import yaml
from .core import (platform, run, get_data_dir, get_bin_name, get_gui_bin_name, get_cli_bin_name,
get_all_bin_names, get_full_bin_name, hash_salt_password)
from .privatekey import PrivateKey
from . import core, init
from .rpcutils import rpc_call, GrapheneClient
import argparse
import os
import sys
import copy
import shutil
import pendulum
import inspect
import logging
log = logging.getLogger(__name__)
BUILD_ENV = None
CLIENT = None
def select_build_environment(env_name):
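    """Select and normalize the build environment `env_name` from config.yaml.

    Expands user paths, fills in default witness/wallet/gui binary names,
    stores the result in the global BUILD_ENV and returns it. Exits the
    process if the environment is unknown.
    """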
log.info("Using build environment '%s' on platform: '%s'" % (env_name, platform))
if platform not in ['linux', 'darwin']:
raise OSError('OS not supported yet, please submit a patch :)')
try:
env = copy.copy(core.config['build_environments'][env_name])
except KeyError:
log.error('Unknown build environment: %s' % env_name)
sys.exit(1)
env['name'] = env_name
env['build_dir'] = expanduser(env['build_dir'])
env['bin_dir'] = expanduser(env['bin_dir'])
env['witness_filename'] = env.get('witness_filename', get_bin_name(build_env=env_name))
env['wallet_filename'] = env.get('wallet_filename', get_cli_bin_name(build_env=env_name))
env['gui_bin_name'] = env.get('gui_bin_name', get_gui_bin_name(build_env=env_name))
global BUILD_ENV
BUILD_ENV = env
return env
def select_client(client):
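    """Select the client configuration `client` from config.yaml.

    Also selects the client's build environment (its 'type') and resolves the
    client's home (data) dir. Stores the result in the global CLIENT and
    returns it. Exits the process if the client is unknown.

    Example (assuming a client named 'seed-test' is defined in config.yaml)::

        env = select_client('seed-test')  # also sets BUILD_ENV as a side effect
    """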
log.info("Running '%s' client" % client)
try:
env = copy.copy(core.config['clients'][client])
env['name'] = client
except KeyError:
log.error('Unknown client: %s' % client)
sys.exit(1)
select_build_environment(env['type'])
env['home_dir'] = get_data_dir(client)
global CLIENT
CLIENT = env
return env
def is_valid_environment(env):
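    """Return True if `env` names a known build environment or configured client."""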
return (env in core.config['build_environments'] or
env in core.config['clients'])
def clone():
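    """Clone the client's git repository into the build dir, unless it
    already contains a valid git checkout."""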
def is_git_dir(path):
try:
            run('git rev-parse', run_dir=path, verbose=False)
return True
except RuntimeError:
return False
if not exists(BUILD_ENV['build_dir']) or not is_git_dir(BUILD_ENV['build_dir']):
run('git clone %s "%s"' % (BUILD_ENV['git_repo'], BUILD_ENV['build_dir']))
def clean_config():
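    """Remove the CMake cache so the next configure() starts from scratch."""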
run('rm -f CMakeCache.txt')
CONFIGURE_OPTS = []
if platform == 'darwin':
# assumes openssl and qt5 installed from brew
CONFIGURE_OPTS = ['PATH=%s:$PATH' % '/usr/local/opt/qt5/bin',
'PKG_CONFIG_PATH=%s:$PKG_CONFIG_PATH' % '/usr/local/opt/openssl/lib/pkgconfig']
def configure(debug=False):
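    """Run cmake in the build dir with the options from config.yaml.

    With debug=True this selects RelWithDebInfo rather than a true Debug
    build, which would be unusably slow.
    """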
cmake_opts = []
boost_root = BUILD_ENV.get('boost_root')
if boost_root:
cmake_opts += ['-DBOOST_ROOT="{}"'.format(expanduser(boost_root))]
if debug:
# do not compile really in debug, it's unusably slow otherwise
cmake_opts += ['-DCMAKE_BUILD_TYPE=RelWithDebInfo']
else:
cmake_opts += ['-DCMAKE_BUILD_TYPE=Release']
cmake_opts += core.config['build_environments'].get('cmake_args', []) + BUILD_ENV.get('cmake_args', [])
run('{} cmake {} .'.format(' '.join(CONFIGURE_OPTS),
' '.join(cmake_opts)), shell=True)
def configure_gui():
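    """Run cmake with the Qt wallet target enabled."""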
run('%s cmake -DINCLUDE_QT_WALLET=ON .' % ' '.join(CONFIGURE_OPTS))
def build(threads=None):
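    """Run make with the extra args from config.yaml, using `threads` parallel jobs if given."""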
make_list = ['make'] + core.config['build_environments'].get('make_args', []) + BUILD_ENV.get('make_args', [])
if threads:
make_list.append('-j%d' % threads)
run(make_list)
def build_gui():
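    """Build the web wallet assets, then the full client with the Qt wallet included."""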
# FIXME: need to make sure that we run once: npm install -g lineman
run('rm -fr programs/qt_wallet/htdocs')
run('cd programs/web_wallet; npm install')
run('make buildweb') # TODO: is 'make forcebuildweb' needed?
build()
def install_last_built_bin():
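    """Install the last built witness and wallet binaries into the bin dir.

    Installed binaries are named after the commit date and tag (or branch and
    short hash) they were built from; a bare-named symlink is updated to
    point at the last installed binary.
    """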
# install into bin dir
date = run('git show -s --format=%ci HEAD', capture_io=True, verbose=False).stdout.split()[0]
branch = run('git rev-parse --abbrev-ref HEAD', capture_io=True, verbose=False).stdout.strip()
commit = run('git log -1', capture_io=True, verbose=False).stdout.splitlines()[0].split()[1]
# find a nice filename representation
def decorated_filename(filename):
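        """Decorate `filename` with the commit date and tag (or branch + short hash)."""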
try:
r = run('git describe --tags %s' % commit, capture_io=True, verbose=False, log_on_fail=False)
if r.status == 0:
# we are on a tag, use it for naming binary
tag = r.stdout.strip().replace('/', '_')
bin_filename = '%s_%s_%s' % (filename, date, tag)
else:
bin_filename = '%s_%s_%s_%s' % (filename, date, branch, commit[:8])
except RuntimeError:
# no tag yet in repo
bin_filename = '%s_%s_%s_%s' % (filename, date, branch, commit[:8])
return bin_filename
def install(src, dst):
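        """Copy `src` (resolving symlinks) into the bin dir as `dst` and return the destination path."""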
print('Installing %s to %s' % (basename(dst), BUILD_ENV['bin_dir']))
if islink(src):
result = join(dirname(src), os.readlink(src))
print('Following symlink %s -> %s' % (src, result))
src = result
dst = join(BUILD_ENV['bin_dir'], basename(dst))
shutil.copy(src, dst)
return dst
def install_and_symlink(binary_type, bin_name):
"""binary_type should be either 'witness' or 'wallet'
bin_name is the base name template that will be used to name the resulting file."""
if binary_type == 'witness':
bin_index = 0
elif binary_type == 'wallet':
bin_index = 1
else:
raise ValueError('binary_type needs to be either "witness" or "wallet"')
client = join(BUILD_ENV['build_dir'], 'programs', get_all_bin_names(build_env=BUILD_ENV['name'])[bin_index])
bin_filename = decorated_filename(bin_name)
c = install(client, bin_filename)
last_installed = join(BUILD_ENV['bin_dir'], basename(bin_name))
with suppress(Exception):
os.unlink(last_installed)
os.symlink(c, last_installed)
if not exists(BUILD_ENV['bin_dir']):
os.makedirs(BUILD_ENV['bin_dir'])
install_and_symlink('witness', BUILD_ENV['witness_filename'])
install_and_symlink('wallet', BUILD_ENV['wallet_filename'])
def main(flavor='bts'):
# parse commandline args
DESC_COMMANDS = """following commands are available:
- version : show version of the tools
- clean_homedir : clean home directory. WARNING: this will delete your wallet!
- save_blockchain_dir : save a snapshot of the current state of the blockchain
- restore_blockchain_dir : restore a snapshot of the current state of the blockchain
- clean : clean build directory
- build : update and build {bin} client
- build_gui : update and build {bin} gui client
- run : run latest compiled {bin} client, or the one with the given hash or tag
- run_cli : run latest compiled {bin} cli wallet
- run_gui : run latest compiled {bin} gui client
- list : list installed {bin} client binaries
- monitor : run the monitoring web app
- deploy : deploy built binaries to a remote server
    - deploy_node : full deploy of a seed or witness node on a given IP address. Needs SSH root access
"""
COMMAND_PLUGINS = {name: core.get_plugin('bts_tools.commands', name)
for name in core.list_valid_plugins('bts_tools.commands')}
DESC_PLUGINS = '\n'.join(' - {:22} : {}'.format(name, plugin.short_description())
for name, plugin in COMMAND_PLUGINS.items())
DESC_EXAMPLES = """
Examples:
$ {bin} build # build the latest {bin} client by default
$ {bin} build v0.4.27 # build specific version
$ {bin} build ppy-dev v0.1.8 # build a specific client/version
$ {bin} run # run the latest compiled client by default
$ {bin} run seed-test # clients are defined in the config.yaml file
$ {bin} build_gui # FIXME: broken...
$ {bin} run_gui # FIXME: broken...
"""
DESC = (DESC_COMMANDS + DESC_PLUGINS + DESC_EXAMPLES).format(bin=flavor)
EPILOG="""You should also look into ~/.bts_tools/config.yaml to tune it to your liking."""
parser = argparse.ArgumentParser(description=DESC, epilog=EPILOG,
formatter_class=RawTextHelpFormatter)
parser.add_argument('command', choices=['version', 'clean_homedir', 'clean', 'build', 'build_gui',
'run', 'run_cli', 'run_gui', 'list', 'monitor',
'deploy', 'deploy_node'] + list(COMMAND_PLUGINS.keys()),
help='the command to run')
parser.add_argument('environment', nargs='?',
help='the build/run environment (bts, steem, ...)')
parser.add_argument('-p', '--pidfile', action='store',
help='filename in which to write PID of child process')
parser.add_argument('-f', '--forward-signals', action='store_true',
help='forward unix signals to spawned witness client child process')
parser.add_argument('args', nargs='*',
help='additional arguments to be passed to the given command')
args = parser.parse_args()
if args.command == 'version':
log.info('Version: %s', core.VERSION)
return
init()
if args.environment is None:
args.environment = flavor
elif args.environment == 'dev':
args.environment = '%s-dev' % flavor
# if given env is not valid, we want to use it as second argument, using
# the default environment as working env
if not is_valid_environment(args.environment):
args.args = [args.environment] + args.args
args.environment = flavor
# FIXME: this needs to be implemented as plugins
if args.command in {'build', 'build_gui'}:
select_build_environment(args.environment)
clone()
os.chdir(BUILD_ENV['build_dir'])
run('git fetch --all')
tag = args.args[0] if args.args else None
nthreads = None
# if we specified -jXX, then it's not a tag, it's a thread count for compiling
if tag and tag.startswith('-j'):
nthreads = int(tag[2:])
tag = None
if tag:
run('git checkout %s' % tag)
else:
r = run('git checkout %s' % BUILD_ENV['git_branch'])
if r.status == 0:
run('git pull')
run('git submodule update --init --recursive')
clean_config()
start = pendulum.utcnow()
if args.command == 'build':
configure(debug=BUILD_ENV.get('debug', False))
build(nthreads)
install_last_built_bin()
elif args.command == 'build_gui':
configure_gui()
build_gui()
elapsed_seconds = (pendulum.utcnow() - start).in_seconds()
mins = elapsed_seconds // 60
secs = elapsed_seconds % 60
msg = 'Compiled in%s%s' % ((' %d mins' % mins if mins else ''),
(' %d secs' % secs if secs else ''))
log.info(msg)
elif args.command in ['run', 'run_cli']:
client = select_client(args.environment)
run_args = core.config.get('run_args', [])
tag = args.args[0] if args.args else None
if args.command == 'run':
bin_name = BUILD_ENV['witness_filename']
elif args.command == 'run_cli':
bin_name = BUILD_ENV['wallet_filename']
# FIXME: only use tag if it actually corresponds to one
if False: #tag:
# if git rev specified, runs specific version
print('Running specific instance of the %s client: %s' % (flavor, tag))
bin_name = run('ls %s' % join(BUILD_ENV['bin_dir'],
'%s_*%s*' % (bin_name, tag[:8])),
capture_io=True, verbose=False).stdout.strip()
run_args += args.args[1:]
else:
# run last built version
bin_name = join(BUILD_ENV['bin_dir'], bin_name)
run_args += args.args
if args.command == 'run':
data_dir = client.get('data_dir')
if data_dir:
run_args = ['--data-dir', expanduser(data_dir)] + run_args
shared_file_size = client.get('shared_file_size')
if shared_file_size:
run_args = ['--shared-file-size', shared_file_size] + run_args
genesis_file = client.get('genesis_file')
if genesis_file:
run_args += ['--genesis-json', expanduser(genesis_file)]
witness_port = client.get('witness_port')
if witness_port:
run_args += ['--rpc-endpoint=127.0.0.1:{}'.format(witness_port)]
p2p_port = client.get('p2p_port')
if p2p_port:
run_args += ['--p2p-endpoint', '0.0.0.0:{}'.format(p2p_port)]
seed_nodes = client.get('seed_nodes', [])
for node in seed_nodes:
run_args += ['--seed-node', node]
checkpoints = client.get('checkpoints')
if checkpoints:
pass # FIXME: implement me
track_accounts = client.get('track_accounts', [])
if track_accounts:
run_args += ['--partial-operations', 'true']
for account in track_accounts:
run_args += ['--track-account', '"{}"'.format(account)]
plugins = []
apis = []
public_apis = []
roles = client.get('roles', [])
for role in roles:
if role['role'] == 'witness':
plugins.append('witness')
if core.affiliation(client['type']) == 'steem':
private_key = role.get('signing_key')
if private_key:
run_args += ['--witness', '"{}"'.format(role['name']),
'--private-key', '{}'.format(private_key)]
else:
witness_id = role.get('witness_id')
private_key = role.get('signing_key')
if witness_id and private_key:
witness_id = '"{}"'.format(witness_id)
public_key = format(PrivateKey(private_key).pubkey, client['type'])
private_key_pair = '["{}", "{}"]'.format(public_key, private_key)
run_args += ['--witness-id', witness_id,
'--private-key', private_key_pair]
elif role['role'] == 'seed':
apis += ['network_node_api']
elif role['role'] == 'feed_publisher':
apis += ['network_broadcast_api']
elif role['role'] == 'api':
if core.affiliation(client['type']) == 'steem':
plugins += ['account_history', 'follow', 'market_history', 'private_message', 'tags']
public_apis += ['database_api', 'login_api', 'market_history_api', 'tag_api', 'follow_api']
def make_unique(l):
result = []
for x in l:
if x not in result:
result.append(x)
return result
# enabling plugins
if core.affiliation(client['type']) == 'steem':
plugins = plugins or ['witness'] # always have at least the witness plugin
plugins = make_unique(client.get('plugins', plugins))
log.info('Running with plugins: {}'.format(plugins))
for plugin in plugins:
run_args += ['--enable-plugin', plugin]
# enabling api access
if core.affiliation(client['type']) == 'steem':
# always required for working with bts_tools, ensure they are always
# in this order at the beginning (so database_api=0, login_api=1, etc.)
# 'network_broadcast_api' required by the wallet
apis = ['database_api', 'login_api', 'network_node_api', 'network_broadcast_api'] + apis
apis = make_unique(client.get('apis', apis))
public_apis = make_unique(client.get('public_apis', public_apis))
log.info('Running with apis: {}'.format(apis))
log.info('Running with public apis: {}'.format(public_apis))
for api in public_apis:
run_args += ['--public-api', api]
if not public_apis:
# FIXME: it seems like we can't access the public apis anymore if specifying this
pw_hash, salt = hash_salt_password(client['witness_password'])
api_user_str = '{"username":"%s", ' % client['witness_user']
api_user_str += '"password_hash_b64": "{}", '.format(pw_hash)
api_user_str += '"password_salt_b64": "{}", '.format(salt)
allowed_apis_str = ', '.join('"{}"'.format(api) for api in make_unique(apis + public_apis))
api_user_str += '"allowed_apis": [{}]'.format(allowed_apis_str)
api_user_str += '}'
run_args += ['--api-user', api_user_str]
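                # The assembled api_user_str is a JSON object string, e.g.
                # (values illustrative):
                #   {"username": "bob", "password_hash_b64": "...",
                #    "password_salt_b64": "...", "allowed_apis": ["database_api", "login_api"]}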
else:
api_access = client.get('api_access')
if api_access:
run_args += ['--api-access', expanduser(api_access)]
run_args += client.get('run_args', [])
elif args.command == 'run_cli':
witness_host = client.get('witness_host', '127.0.0.1')
witness_port = client.get('witness_port')
if witness_port:
run_args += ['--server-rpc-endpoint=ws://{}:{}'.format(witness_host, witness_port)]
run_args += ['--server-rpc-user={}'.format(client['witness_user'])]
run_args += ['--server-rpc-password={}'.format(client['witness_password'])]
wallet_port = client.get('wallet_port')
if wallet_port:
run_args += ['--rpc-http-endpoint=127.0.0.1:{}'.format(wallet_port)]
chain_id = client.get('chain_id')
if chain_id:
run_args += ['--chain-id', chain_id]
run_args += client.get('run_cli_args', [])
if client.get('debug', False):
if platform == 'linux':
# FIXME: pidfile will write pid of gdb, not of the process being run inside gdb...
cmd = ['gdb', '-ex', 'run', '--args', bin_name] + run_args
else:
log.warning('Running with debug=true is not implemented on your platform (%s)' % platform)
cmd = [bin_name] + run_args
else:
cmd = [bin_name] + run_args
        # for graphene clients, always cd to the data dir first (if defined); this ensures
        # the wallet file and everything else don't get scattered all over the place
data_dir = get_data_dir(client['name'])
if data_dir:
# ensure it exists to be able to cd into it
with suppress(FileExistsError):
Path(data_dir).mkdir(parents=True)
else:
log.warning('No data dir specified for running {} client'.format(client['name']))
# also install signal handler to forward signals to witness client child process (esp. SIGINT)
pidfile = args.pidfile or client.get('pidfile')
run(cmd, run_dir=data_dir, forward_signals=args.forward_signals, pidfile=pidfile)
elif args.command == 'run_gui':
select_build_environment(args.environment)
if platform == 'darwin':
run('open %s' % join(BUILD_ENV['build_dir'], 'programs/qt_wallet/bin/%s.app' % BUILD_ENV['gui_bin_name']))
elif platform == 'linux':
run(join(BUILD_ENV['build_dir'], 'programs/qt_wallet/bin/%s' % BUILD_ENV['gui_bin_name']))
elif args.command == 'clean':
select_build_environment(args.environment)
print('\nCleaning build directory...')
run('rm -fr "%s"' % BUILD_ENV['build_dir'], verbose=True)
elif args.command == 'clean_homedir':
select_client(args.environment)
print('\nCleaning home directory...')
if not CLIENT['home_dir']:
print('ERROR: The home/data dir has not been specified in the build environment...')
print(' Please check your config.yaml file')
sys.exit(1)
cmd = 'rm -fr "%s"' % CLIENT['home_dir']
if args.environment != 'development':
print('WARNING: you are about to delete your wallet on the real chain.')
print(' you may lose some real money if you do this!...')
print('If you really want to do it, you\'ll have to manually run the command:')
print(cmd)
sys.exit(1)
run(cmd, verbose=True)
elif args.command == 'list':
select_build_environment(args.environment)
print('\nListing built binaries for environment: %s' % args.environment)
run('ls -ltr "%s"' % BUILD_ENV['bin_dir'])
elif args.command == 'monitor':
print('\nLaunching monitoring web app...')
run('python3 -m bts_tools.wsgi')
elif args.command == 'deploy':
if not args.args:
log.error('You need to specify a remote host to deploy to')
sys.exit(1)
from .deploy import deploy # can only import now due to potential circular import
for remote_host in args.args:
deploy(args.environment, remote_host)
elif args.command == 'deploy_node':
select_build_environment(args.environment)
print()
if len(args.args) != 2:
log.error('You need to specify a deployment config file as argument and a host ip or vps provider')
log.error('eg: bts deploy_node deploy_config.yaml 123.123.123.123 # use given host for install')
log.error('eg: bts deploy_node deploy_config.yaml vultr # create a new vps instance')
log.info('You can find an example config file at {}'.format(join(dirname(__file__), 'deploy_config.yaml')))
sys.exit(1)
config_file = args.args[0]
host = args.args[1]
from .deploy import deploy_node # can only import now due to potential circular import
deploy_node(args.environment, config_file, host)
elif args.command in COMMAND_PLUGINS:
cmd = COMMAND_PLUGINS[args.command]
if 'env' in inspect.signature(cmd.run_command).parameters:
cmd.run_command(*args.args, env=args.environment)
else:
cmd.run_command(*args.args)
def main_bts():
return main(flavor='bts')
def main_muse():
return main(flavor='muse')
def main_steem():
return main(flavor='steem')
def main_ppy():
return main(flavor='ppy')
| gpl-3.0 | -5,400,526,556,740,207,000 | 39.666107 | 119 | 0.564096 | false |
unsystemizer/counterparty-cli | counterpartycli/console.py | 1 | 3966 | import os
from prettytable import PrettyTable
from counterpartycli import wallet, util
# TODO: inelegant
def get_view(view_name, args):
if view_name == 'balances':
return wallet.balances(args.address)
elif view_name == 'asset':
return wallet.asset(args.asset)
elif view_name == 'wallet':
return wallet.wallet()
elif view_name == 'pending':
return wallet.pending()
elif view_name == 'getinfo':
return util.api('get_running_info')
elif view_name == 'getrows':
method = 'get_{}'.format(args.table)
if args.filter:
filters = [tuple(f) for f in args.filter]
else:
filters = []
params = {
'filters': filters,
'filterop': args.filter_op,
'order_by': args.order_by,
'order_dir': args.order_dir,
'start_block': args.start_block,
'end_block': args.end_block,
'status': args.status,
'limit': args.limit,
'offset': args.offset
}
return util.api(method, params)
def print_balances(balances):
lines = []
lines.append('')
lines.append('Balances')
table = PrettyTable(['Asset', 'Amount'])
for asset in balances:
table.add_row([asset, balances[asset]])
lines.append(table.get_string())
lines.append('')
print(os.linesep.join(lines))
def print_asset(asset):
lines = []
lines.append('')
    lines.append('Information')
table = PrettyTable(header=False, align='l')
table.add_row(['Asset Name:', asset['asset']])
table.add_row(['Asset ID:', asset['asset_id']])
table.add_row(['Divisible:', asset['divisible']])
table.add_row(['Locked:', asset['locked']])
table.add_row(['Supply:', asset['supply']])
table.add_row(['Issuer:', asset['issuer']])
table.add_row(['Description:', '‘' + asset['description'] + '’'])
table.add_row(['Balance:', asset['balance']])
lines.append(table.get_string())
if asset['addresses']:
lines.append('')
lines.append('Addresses')
table = PrettyTable(['Address', 'Balance'])
for address in asset['addresses']:
balance = asset['addresses'][address]
table.add_row([address, balance])
lines.append(table.get_string())
if asset['sends']:
lines.append('')
lines.append('Sends')
table = PrettyTable(['Type', 'Quantity', 'Source', 'Destination'])
for send in asset['sends']:
table.add_row([send['type'], send['quantity'], send['source'], send['destination']])
lines.append(table.get_string())
lines.append('')
print(os.linesep.join(lines))
def print_wallet(wallet):
lines = []
for address in wallet['addresses']:
table = PrettyTable(['Asset', 'Balance'])
for asset in wallet['addresses'][address]:
balance = wallet['addresses'][address][asset]
table.add_row([asset, balance])
lines.append(address)
lines.append(table.get_string())
lines.append('')
total_table = PrettyTable(['Asset', 'Balance'])
for asset in wallet['assets']:
balance = wallet['assets'][asset]
total_table.add_row([asset, balance])
lines.append('TOTAL')
lines.append(total_table.get_string())
lines.append('')
print(os.linesep.join(lines))
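# NOTE: `format_order_match` is used by print_pending() below but is not
# defined in this excerpt; a minimal placeholder is sketched here, assuming
# it reduces an order-match record to the two display columns. The field
# names are illustrative, not the real ones.
def format_order_match(order_match):
    return [order_match.get('order_match_id'), order_match.get('time_left')]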
def print_pending(awaiting_btcs):
table = PrettyTable(['Matched Order ID', 'Time Left'])
for order_match in awaiting_btcs:
order_match = format_order_match(order_match)
table.add_row(order_match)
print(table)
def print_getrows(rows):
if len(rows) > 0:
headers = list(rows[0].keys())
table = PrettyTable(headers)
for row in rows:
values = list(row.values())
table.add_row(values)
print(table)
else:
print("No result.")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | -7,392,429,104,027,358,000 | 32.294118 | 96 | 0.585815 | false |
Julian/home-assistant | homeassistant/components/media_player/demo.py | 1 | 11274 | """
Demo implementation of the media player.
For more details about this platform, please refer to the documentation
https://home-assistant.io/components/demo/
"""
from homeassistant.components.media_player import (
MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO, SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK,
SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET,
SUPPORT_SELECT_SOURCE, SUPPORT_CLEAR_PLAYLIST, MediaPlayerDevice)
from homeassistant.const import STATE_OFF, STATE_PAUSED, STATE_PLAYING
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the media player demo platform."""
add_devices([
DemoYoutubePlayer(
'Living Room', 'eyU3bRy2x44',
'♥♥ The Best Fireplace Video (3 hours)'),
DemoYoutubePlayer('Bedroom', 'kxopViU98Xo', 'Epic sax guy 10 hours'),
DemoMusicPlayer(), DemoTVShowPlayer(),
])
YOUTUBE_COVER_URL_FORMAT = 'https://img.youtube.com/vi/{}/hqdefault.jpg'
YOUTUBE_PLAYER_SUPPORT = \
SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \
SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PLAY_MEDIA
MUSIC_PLAYER_SUPPORT = \
SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \
SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_CLEAR_PLAYLIST
NETFLIX_PLAYER_SUPPORT = \
SUPPORT_PAUSE | SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE
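# The SUPPORT_* constants are bit flags, so a player's capability set is the
# bitwise OR of its individual features and can be tested with a bitwise AND,
# e.g. (illustrative):
#
#     if player.supported_media_commands & SUPPORT_PAUSE:
#         player.media_pause()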
class AbstractDemoPlayer(MediaPlayerDevice):
"""A demo media players."""
# We only implement the methods that we support
# pylint: disable=abstract-method
def __init__(self, name):
"""Initialize the demo device."""
self._name = name
self._player_state = STATE_PLAYING
self._volume_level = 1.0
self._volume_muted = False
@property
def should_poll(self):
"""Push an update after each command."""
return False
@property
def name(self):
"""Return the name of the media player."""
return self._name
@property
def state(self):
"""Return the state of the player."""
return self._player_state
@property
def volume_level(self):
"""Return the volume level of the media player (0..1)."""
return self._volume_level
@property
def is_volume_muted(self):
"""Return boolean if volume is currently muted."""
return self._volume_muted
def turn_on(self):
"""Turn the media player on."""
self._player_state = STATE_PLAYING
self.update_ha_state()
def turn_off(self):
"""Turn the media player off."""
self._player_state = STATE_OFF
self.update_ha_state()
def mute_volume(self, mute):
"""Mute the volume."""
self._volume_muted = mute
self.update_ha_state()
def set_volume_level(self, volume):
"""Set the volume level, range 0..1."""
self._volume_level = volume
self.update_ha_state()
def media_play(self):
"""Send play command."""
self._player_state = STATE_PLAYING
self.update_ha_state()
def media_pause(self):
"""Send pause command."""
self._player_state = STATE_PAUSED
self.update_ha_state()
class DemoYoutubePlayer(AbstractDemoPlayer):
"""A Demo media player that only supports YouTube."""
# We only implement the methods that we support
# pylint: disable=abstract-method
def __init__(self, name, youtube_id=None, media_title=None):
"""Initialize the demo device."""
super(DemoYoutubePlayer, self).__init__(name)
self.youtube_id = youtube_id
self._media_title = media_title
@property
def media_content_id(self):
"""Return the content ID of current playing media."""
return self.youtube_id
@property
def media_content_type(self):
"""Return the content type of current playing media."""
return MEDIA_TYPE_VIDEO
@property
def media_duration(self):
"""Return the duration of current playing media in seconds."""
return 360
@property
def media_image_url(self):
"""Return the image url of current playing media."""
return YOUTUBE_COVER_URL_FORMAT.format(self.youtube_id)
@property
def media_title(self):
"""Return the title of current playing media."""
return self._media_title
@property
def app_name(self):
"""Return the current running application."""
return "YouTube"
@property
def supported_media_commands(self):
"""Flag of media commands that are supported."""
return YOUTUBE_PLAYER_SUPPORT
def play_media(self, media_type, media_id, **kwargs):
"""Play a piece of media."""
self.youtube_id = media_id
self.update_ha_state()
class DemoMusicPlayer(AbstractDemoPlayer):
"""A Demo media player that only supports YouTube."""
# We only implement the methods that we support
# pylint: disable=abstract-method
tracks = [
('Technohead', 'I Wanna Be A Hippy (Flamman & Abraxas Radio Mix)'),
('Paul Elstak', 'Luv U More'),
('Dune', 'Hardcore Vibes'),
('Nakatomi', 'Children Of The Night'),
('Party Animals',
'Have You Ever Been Mellow? (Flamman & Abraxas Radio Mix)'),
('Rob G.*', 'Ecstasy, You Got What I Need'),
('Lipstick', "I'm A Raver"),
('4 Tune Fairytales', 'My Little Fantasy (Radio Edit)'),
('Prophet', "The Big Boys Don't Cry"),
('Lovechild', 'All Out Of Love (DJ Weirdo & Sim Remix)'),
('Stingray & Sonic Driver', 'Cold As Ice (El Bruto Remix)'),
('Highlander', 'Hold Me Now (Bass-D & King Matthew Remix)'),
('Juggernaut', 'Ruffneck Rules Da Artcore Scene (12" Edit)'),
('Diss Reaction', 'Jiiieehaaaa '),
('Flamman And Abraxas', 'Good To Go (Radio Mix)'),
('Critical Mass', 'Dancing Together'),
('Charly Lownoise & Mental Theo',
'Ultimate Sex Track (Bass-D & King Matthew Remix)'),
]
def __init__(self):
"""Initialize the demo device."""
super(DemoMusicPlayer, self).__init__('Walkman')
self._cur_track = 0
@property
def media_content_id(self):
"""Return the content ID of current playing media."""
return 'bounzz-1'
@property
def media_content_type(self):
"""Return the content type of current playing media."""
return MEDIA_TYPE_MUSIC
@property
def media_duration(self):
"""Return the duration of current playing media in seconds."""
return 213
@property
def media_image_url(self):
"""Return the image url of current playing media."""
return 'https://graph.facebook.com/v2.5/107771475912710/' \
'picture?type=large'
@property
def media_title(self):
"""Return the title of current playing media."""
return self.tracks[self._cur_track][1] if len(self.tracks) > 0 else ""
@property
def media_artist(self):
"""Return the artist of current playing media (Music track only)."""
return self.tracks[self._cur_track][0] if len(self.tracks) > 0 else ""
@property
def media_album_name(self):
"""Return the album of current playing media (Music track only)."""
# pylint: disable=no-self-use
return "Bounzz"
@property
def media_track(self):
"""Return the track number of current media (Music track only)."""
return self._cur_track + 1
@property
def supported_media_commands(self):
"""Flag of media commands that are supported."""
support = MUSIC_PLAYER_SUPPORT
if self._cur_track > 0:
support |= SUPPORT_PREVIOUS_TRACK
if self._cur_track < len(self.tracks) - 1:
support |= SUPPORT_NEXT_TRACK
return support
def media_previous_track(self):
"""Send previous track command."""
if self._cur_track > 0:
self._cur_track -= 1
self.update_ha_state()
def media_next_track(self):
"""Send next track command."""
if self._cur_track < len(self.tracks) - 1:
self._cur_track += 1
self.update_ha_state()
def clear_playlist(self):
"""Clear players playlist."""
self.tracks = []
self._cur_track = 0
self._player_state = STATE_OFF
self.update_ha_state()
class DemoTVShowPlayer(AbstractDemoPlayer):
"""A Demo media player that only supports YouTube."""
# We only implement the methods that we support
# pylint: disable=abstract-method
def __init__(self):
"""Initialize the demo device."""
super(DemoTVShowPlayer, self).__init__('Lounge room')
self._cur_episode = 1
self._episode_count = 13
self._source = 'dvd'
@property
def media_content_id(self):
"""Return the content ID of current playing media."""
return 'house-of-cards-1'
@property
def media_content_type(self):
"""Return the content type of current playing media."""
return MEDIA_TYPE_TVSHOW
@property
def media_duration(self):
"""Return the duration of current playing media in seconds."""
return 3600
@property
def media_image_url(self):
"""Return the image url of current playing media."""
return 'https://graph.facebook.com/v2.5/HouseofCards/picture?width=400'
@property
def media_title(self):
"""Return the title of current playing media."""
return 'Chapter {}'.format(self._cur_episode)
@property
def media_series_title(self):
"""Return the series title of current playing media (TV Show only)."""
return 'House of Cards'
@property
def media_season(self):
"""Return the season of current playing media (TV Show only)."""
return 1
@property
def media_episode(self):
"""Return the episode of current playing media (TV Show only)."""
return self._cur_episode
@property
def app_name(self):
"""Return the current running application."""
return "Netflix"
@property
def source(self):
"""Return the current input source."""
return self._source
@property
def supported_media_commands(self):
"""Flag of media commands that are supported."""
support = NETFLIX_PLAYER_SUPPORT
if self._cur_episode > 1:
support |= SUPPORT_PREVIOUS_TRACK
if self._cur_episode < self._episode_count:
support |= SUPPORT_NEXT_TRACK
return support
def media_previous_track(self):
"""Send previous track command."""
if self._cur_episode > 1:
self._cur_episode -= 1
self.update_ha_state()
def media_next_track(self):
"""Send next track command."""
if self._cur_episode < self._episode_count:
self._cur_episode += 1
self.update_ha_state()
def select_source(self, source):
"""Set the input source."""
self._source = source
self.update_ha_state()
| mit | 8,502,807,707,185,662,000 | 30.480447 | 79 | 0.61189 | false |
harigov/newsalyzer | crawler/YahooMailParser.py | 1 | 1198 | import urlparse
import os
from bs4 import BeautifulSoup
import re
from article_parser import ArticleParser
class YahooMailParser(ArticleParser):
def parse(self, url, article_text):
ArticleParser.parse(self,url,article_text)
def _get_title(self):
head = self._soup.find('header', {'class' : 'canvas-header'})
        if head is not None:
            title = head.find('h1')
            if title is not None:
return title.getText()
return ''
def _get_date(self):
date = self._soup.find('div', {'class' : 'D'})
        if date is not None:
            return date.getText().encode('utf-8')
        else:
            date = self._soup.find('meta', {'name' : 'DISPLAYDATE'})
            if date is not None:
                date = date.get('content')
                if date is not None:
                    return date.encode('utf-8')
return ''
def _get_article_text(self):
content = ''
for story_element in self._soup.findAll('div', {'class' : 'canvas-body'}):
if story_element != None:
# Remove newlines
content += re.sub(r"\n+", " ", story_element.getText())
return content
| mit | 5,628,196,657,293,056,000 | 31.378378 | 82 | 0.530885 | false |
livoras/feifanote-server | apis/_helpers.py | 1 | 1938 | # -*- coding: utf-8 -*-
from functools import wraps
from flask import session, request
from common.utils import message
from models.notebook import Notebook
from models.page import Page
from common import db
def require_login(route_fn):
"""
Decorator for router functions that need user to login first.
"""
@wraps(route_fn)
def _require_login(*argvs, **keywords):
if not session.get("is_login"):
return message("You have to login first.", 401)
else:
return route_fn(*argvs, **keywords)
return _require_login
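# A minimal usage sketch (the Flask app and route below are illustrative,
# not part of this module):
#
#     @app.route("/notebooks")
#     @require_login
#     def list_notebooks():
#         ...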
def notebook_ownership_check(route_fn):
@wraps(route_fn)
def _route_fn(*argvs, **keywords):
data = request.json or {}
notebook_id = data.get("notebook_id")
notebook_id = notebook_id or keywords.get("notebook_id")
not_found = message("Notebook is not found.", 404)
if not session.get("is_login"):
return not_found
if not current_user_has_notebook(notebook_id):
return not_found
keywords.setdefault("notebook_id", notebook_id)
return route_fn(*argvs, **keywords)
return _route_fn
def current_user_has_notebook(notebook_id):
user_id = session.get("id")
notebook = db.session.query(Notebook) \
.filter_by(id=notebook_id, user_id=user_id) \
.first()
return notebook
def page_ownership_check(route_fn):
@wraps(route_fn)
def _route_fn(*argvs, **keywords):
data = request.json or {}
page_id = data.get("page_id")
page_id = page_id or keywords.get("page_id")
page = db.session.query(Page).filter_by(id=page_id).first()
if not page or page.notebook.user_id != session.get("id"):
return message("Page is not found.", 404)
keywords.setdefault("page_id", page_id)
return route_fn(*argvs, **keywords)
return _route_fn | mit | -2,775,173,960,715,875,300 | 34.907407 | 70 | 0.610939 | false |
sarwojowo/KerasProject | resnetcustom.py | 1 | 3281 | #python resnetcustom.py --file images/office.png
from __future__ import print_function
import numpy as np
import json
import os
import time
from keras import backend as K
from keras.preprocessing import image
from keras.applications import ResNet50
from keras.utils.data_utils import get_file
CLASS_INDEX = None
CLASS_INDEX_PATH = ('https://s3.amazonaws.com/deep-learning-models/'
'image-models/imagenet_class_index.json')
def preprocess_input(x, dim_ordering='default'):
if dim_ordering == 'default':
dim_ordering = K.image_dim_ordering()
assert dim_ordering in {'tf', 'th'}
if dim_ordering == 'th':
x[:, 0, :, :] -= 103.939
x[:, 1, :, :] -= 116.779
x[:, 2, :, :] -= 123.68
# 'RGB'->'BGR'
x = x[:, ::-1, :, :]
else:
x[:, :, :, 0] -= 103.939
x[:, :, :, 1] -= 116.779
x[:, :, :, 2] -= 123.68
# 'RGB'->'BGR'
x = x[:, :, :, ::-1]
return x
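# Note: the per-channel offsets above are the ImageNet channel means used by
# the original Caffe-trained models, applied in BGR order, which is why the
# channels are also flipped from RGB to BGR before inference.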
def decode_predictions(preds, top=5):
global CLASS_INDEX
if len(preds.shape) != 2 or preds.shape[1] != 1000:
raise ValueError('`decode_predictions` expects '
'a batch of predictions '
'(i.e. a 2D array of shape (samples, 1000)). '
'Found array with shape: ' + str(preds.shape))
if CLASS_INDEX is None:
fpath = get_file('imagenet_class_index.json',
CLASS_INDEX_PATH,
cache_subdir='models')
CLASS_INDEX = json.load(open(fpath))
results = []
for pred in preds:
top_indices = pred.argsort()[-top:][::-1]
result = [tuple(CLASS_INDEX[str(i)]) + (pred[i],) for i in top_indices]
results.append(result)
return results
def is_valid_file(parser, arg):
arg = os.path.abspath(arg)
if not os.path.exists(arg):
parser.error("The file %s does not exist!" % arg)
else:
return arg
def get_parser():
"""Get parser object."""
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
parser = ArgumentParser(description=__doc__,
formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument("-f", "--file",
dest="filename",
type=lambda x: is_valid_file(parser, x),
help="Classify image",
metavar="IMAGE",
required=True)
return parser
if __name__ == "__main__":
args = get_parser().parse_args()
# Load model
model = ResNet50(include_top=True, weights='imagenet')
img_path = args.filename
img = image.load_img(img_path, target_size=(224, 224))
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)
x = preprocess_input(x)
# print('Input image shape:', x.shape)
t0 = time.time()
preds = model.predict(x)
t1 = time.time()
print("Prediction time: {:0.3f}s".format(t1 - t0))
for wordnet_id, class_name, prob in decode_predictions(preds)[0]:
print("{wid}\t{prob:>6}%\t{name}".format(wid=wordnet_id,
name=class_name,
prob="%0.2f" % (prob * 100)))
| mit | 4,369,614,500,541,529,600 | 30.548077 | 79 | 0.537031 | false |
datascopeanalytics/scrubadub | scrubadub/post_processors/text_replacers/prefix_suffix.py | 1 | 1176 | from typing import Optional, Sequence
from ...filth import Filth
from ..base import PostProcessor
class PrefixSuffixReplacer(PostProcessor):
name = 'prefix_suffix_replacer' # type: str
def __init__(self, prefix: Optional[str] = '{{', suffix: Optional[str] = '}}', name: Optional[str] = None):
super(PrefixSuffixReplacer, self).__init__(name=name)
self.prefix = prefix
self.suffix = suffix
def process_filth(self, filth_list: Sequence[Filth]) -> Sequence[Filth]:
for filth_item in filth_list:
if filth_item.replacement_string is None:
filth_item.replacement_string = filth_item.type.upper()
if self.prefix is not None and self.suffix is not None:
filth_item.replacement_string = self.prefix + filth_item.replacement_string + self.suffix
elif self.prefix is not None:
filth_item.replacement_string = self.prefix + filth_item.replacement_string
elif self.suffix is not None:
filth_item.replacement_string = filth_item.replacement_string + self.suffix
return filth_list
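# A short usage sketch (hedged): `replacement_string` falls back to the
# upper-cased filth type before the prefix and suffix are applied, so with
# the defaults an email filth ends up rendered as "{{EMAIL}}".
#
#     replacer = PrefixSuffixReplacer(prefix='<<', suffix='>>')
#     filth_list = replacer.process_filth(filth_list)  # EMAIL -> <<EMAIL>>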
__all__ = ['PrefixSuffixReplacer']
| mit | -9,170,214,622,333,614,000 | 36.935484 | 111 | 0.64881 | false |
austinbyers/esprima-ast-visitor | visitor_test.py | 1 | 3065 | """Test the AST traversal."""
import gzip
import json
import unittest
import visitor
class TestTraverse(unittest.TestCase):
"""Verify that traversals progress in the correct order and terminate.
Test cases are drawn from a variety of very large real-world JS examples,
including popular libraries and code served by the Alexa top 10.
"""
def _test_ast_file(self, path: str) -> None:
"""Traverse the AST specified in the given test file."""
with gzip.open(path, 'rt') as ast_file:
ast_string = ast_file.read()
# The expected traversal is given by the layout of the JSON file.
expected_types = []
for line in ast_string.split('\n'):
words = line.strip().split()
if words[0] == '"type":':
expected_types.append(words[1].strip(',').strip('"'))
# Traverse the AST, keeping track of node types.
node = visitor.Program(json.loads(ast_string))
found_types = [n.type for n in node.traverse()]
self.assertEqual(expected_types, found_types)
# Dump the node back to a dict and make sure it parses again the same way.
reparsed = visitor.Program(node.dict())
newfound_types = [n.type for n in reparsed.traverse()]
self.assertEqual(expected_types, newfound_types)
# pylint: disable=missing-docstring
def test_amazon(self):
self._test_ast_file('test_ast/amazon.ast.gz')
def test_baidu(self):
self._test_ast_file('test_ast/baidu.ast.gz')
def test_facebook(self):
self._test_ast_file('test_ast/facebook.ast.gz')
def test_google(self):
self._test_ast_file('test_ast/google.ast.gz')
def test_handlebars(self):
self._test_ast_file('test_ast/handlebars.ast.gz')
def test_jquery(self):
self._test_ast_file('test_ast/jquery.ast.gz')
def test_jquery_ui(self):
self._test_ast_file('test_ast/jquery-ui.ast.gz')
def test_qq(self):
self._test_ast_file('test_ast/qq.ast.gz')
def test_sugar(self):
self._test_ast_file('test_ast/sugar.ast.gz')
def test_twitter(self):
self._test_ast_file('test_ast/twitter.ast.gz')
def test_wikipedia(self):
self._test_ast_file('test_ast/wikipedia.ast.gz')
def test_yahoo(self):
self._test_ast_file('test_ast/yahoo.ast.gz')
def test_remaining(self):
"""Test any node types which weren't already covered."""
data = {
'type': 'WithStatement',
'object': None,
'body': {
'type': 'DebuggerStatement'
}
}
node = visitor.objectify(data)
self.assertEqual(
['WithStatement', 'DebuggerStatement'], [n.type for n in node.traverse()])
def test_unexpected_node_type(self):
"""Verify traversal failure for an unknown node type."""
with self.assertRaises(visitor.UnknownNodeTypeError):
visitor.objectify({'type': 'FakeNodeType'})
if __name__ == '__main__':
unittest.main()
| mit | 7,185,851,426,806,917,000 | 30.597938 | 86 | 0.610114 | false |
GandhiNN/StarOS-cactipy | sgsnmme/get3gAttachSR.py | 1 | 5758 | #!/usr/bin/env python3
#
# author == __gandhi__
# ngakan.gandhi@packet-systems.com
import pexpect
import sys
import os
import json
import time
import re
# Load node user and password configuration file
def load_node_config(node_config_file):
with open(node_config_file) as node_confile:
return json.load(node_confile)
def get_node_user_pass(node_config):
node = sys.argv[1].upper()
    user = node_config[node]['user']
    password = node_config[node]['password']
return user, password
def get3gAttachSrStats(node_expect, ip_address, user, password):
with open("./sgsn_attach_sr_raw.log", 'w') as expect_log:
ssh_command = "ssh " + user + "@" + ip_address
ssh_newkey = "Are you sure you want to continue connecting"
child = pexpect.spawnu(ssh_command)
child.logfile = expect_log
# Set conditional based on matched patterns returned by "i" pexpect instance
i = child.expect([pexpect.TIMEOUT, ssh_newkey, "password:"])
if i == 0: # Timeout -> expect method match TIMEOUT pattern
print("ERROR!")
print("SSH could not login. Here is what SSH said:")
print(child.before, child.after)
return None
if i == 1: # SSH does not have the public key, just accept it
child.sendline("yes")
b = child.expect([pexpect.TIMEOUT, "password:"])
if b == 0: # Timeout
print("ERROR!")
print("SSH Could not login. Here is what SSH said:")
print(child.before, child.after)
if b == 1: # Continue
child.sendline(password)
child.expect(node_expect)
child.sendline('show gmm-sm statistics gmm-only sgsn-service sgsn-svc verbose')
child.expect(node_expect)
child.sendline('exit')
if i == 2: # SSH already has the public key
child.sendline(password)
child.expect(node_expect)
child.sendline('show gmm-sm statistics gmm-only sgsn-service sgsn-svc verbose')
child.expect(node_expect)
child.sendline('exit')
def parse_data(node, sgsn_attach_sr_raw_log):
with open(sgsn_attach_sr_raw_log, 'r') as parse_log:
lines = parse_log.readlines()
sgsnAttachAccept = ""
sgsnAttachReject = ""
sgsAttachFailure = ""
sgsnGprsSvcNotAllow = ""
sgsnGprsNonGprsSvcNotAllow = ""
sgsnRoamingNotAllowedLocArea = ""
sgsnGprsSvcNotAllowedPlmn = ""
sgsnNoSuitableCellsLocArea = ""
sgsnFailOngoingProc = ""
sgsnNetworkFailureExt = ""
for index, line in enumerate(lines):
if re.match(r"Attach Accept:", line):
line_sgsn_attach_accept = lines[index + 2]
sgsnAttachAccept = int(line_sgsn_attach_accept.strip().split()[1])
if re.match(r"Attach Reject:", line):
line_sgsn_attach_reject = lines[index + 2]
sgsnAttachReject = int(line_sgsn_attach_reject.strip().split()[1])
if re.match(r"Attach Failure:", line):
line_sgsn_attach_failure = lines[index + 2]
sgsnAttachFailure = int(line_sgsn_attach_failure.strip().split()[1])
if re.match(r"Gprs-Attach Reject Causes:", line):
line_gprs_svc_not_allow = lines[index + 4]
line_gprs_non_gprs_not_allow = lines[index + 6]
line_roaming_not_allow = lines[index + 12]
line_gprs_svc_not_allow_plmn = lines[index + 14]
line_no_suitable_cells_la = lines[index + 16]
sgsnGprsSvcNotAllow = int(line_gprs_svc_not_allow.strip().split()[4])
sgsnGprsNonGprsSvcNotAllow = int(line_gprs_non_gprs_not_allow.strip().split()[2])
sgsnRoamingNotAllowedLocArea = int(line_roaming_not_allow.strip().split()[3])
sgsnGprsSvcNotAllowedPlmn = int(line_gprs_svc_not_allow_plmn.strip().split()[3])
sgsnNoSuitableCellsLocArea = int(line_no_suitable_cells_la.strip().split()[3])
if re.match(r"Gprs-Attach Failure Causes:", line):
line_sgsn_failure_ongoing_proc = lines[index + 4]
sgsnFailOngoingProc = int(line_sgsn_failure_ongoing_proc.strip().split()[2])
if re.match(r"GPRS-Attach Network Failure Cause:", line):
line_sgsn_network_fail_ext = lines[index + 1]
sgsnNetworkFailureExt = int(line_sgsn_network_fail_ext.strip().split()[4])
# KPI Calc
sgsnAttachReq = sgsnAttachAccept + sgsnAttachReject + sgsnAttachFailure
sgsnAttachSr = (sgsnAttachAccept / (sgsnAttachReq - (sgsnGprsSvcNotAllow + sgsnGprsNonGprsSvcNotAllow
+ sgsnRoamingNotAllowedLocArea + sgsnGprsSvcNotAllowedPlmn + sgsnNoSuitableCellsLocArea
+ sgsnFailOngoingProc + sgsnNetworkFailureExt)) * 100)
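    # Worked example (numbers illustrative): 900 accepts + 80 rejects +
    # 20 failures give 1000 attach requests; if 50 of those fall under the
    # excluded causes above, SR = 900 / (1000 - 50) * 100 ≈ 94.74%.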
message = '3gattach_sr:{0:.2f}'
    print(message.format(sgsnAttachSr), end='')
def main(argv):
node_expect = sys.argv[1].upper() + "#"
ip_address = sys.argv[2]
node_config_file = "/usr/share/cacti/site/COMMON/node_config.json"
node_config = load_node_config(node_config_file)
user, password = get_node_user_pass(node_config)
get3gAttachSrStats(node_expect, ip_address, user, password)
sgsn_attach_sr_raw = "./sgsn_attach_sr_raw.log"
parse_data(sys.argv[1].upper(), sgsn_attach_sr_raw)
os.remove(sgsn_attach_sr_raw)
if __name__ == "__main__":
main(sys.argv[1:]) | mit | -5,496,172,499,643,286,000 | 46 | 107 | 0.592741 | false |
vgrem/SharePointOnline-REST-Python-Client | office365/sharepoint/folder.py | 1 | 2600 | from office365.runtime.client_object import ClientObject
from office365.runtime.client_query import UpdateEntityQuery, DeleteEntityQuery
from office365.runtime.resource_path import ResourcePath
from office365.runtime.resource_path_service_operation import ResourcePathServiceOperation
from office365.sharepoint.listitem import ListItem
class Folder(ClientObject):
"""Represents a folder in a SharePoint Web site."""
def rename(self, name):
"""Rename a Folder resource"""
item = self.list_item_all_fields
item.properties['Title'] = name
item.properties['FileLeafRef'] = name
qry = UpdateEntityQuery(item)
self.context.add_query(qry)
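    # Note (usage sketch): rename() only queues an UpdateEntityQuery on the
    # context; nothing is sent to the server until the owning client
    # context's execute_query() runs, e.g. (names illustrative):
    #
    #     folder.rename("Reports")
    #     ctx.execute_query()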
def update(self):
qry = UpdateEntityQuery(self)
self.context.add_query(qry)
def delete_object(self):
"""Deletes the folder."""
qry = DeleteEntityQuery(self)
self.context.add_query(qry)
# self.removeFromParentCollection()
@property
def list_item_all_fields(self):
"""Specifies the list item field (2) values for the list item corresponding to the folder."""
if self.is_property_available('ListItemAllFields'):
return self.properties["ListItemAllFields"]
else:
return ListItem(self.context, ResourcePath("ListItemAllFields", self.resourcePath))
@property
def files(self):
"""Get a file collection"""
if self.is_property_available('Files'):
return self.properties["Files"]
else:
from office365.sharepoint.file_collection import FileCollection
return FileCollection(self.context, ResourcePath("Files", self.resourcePath))
@property
def folders(self):
"""Get a folder collection"""
if self.is_property_available('Folders'):
return self.properties["Folders"]
else:
from office365.sharepoint.folder_collection import FolderCollection
return FolderCollection(self.context, ResourcePath("Folders", self.resourcePath))
def set_property(self, name, value, persist_changes=True):
super(Folder, self).set_property(name, value, persist_changes)
# fallback: create a new resource path
if self._resource_path is None:
if name == "ServerRelativeUrl":
self._resource_path = ResourcePathServiceOperation("GetFolderByServerRelativeUrl", [value], ResourcePath("Web"))
elif name == "UniqueId":
self._resource_path = ResourcePathServiceOperation("GetFolderById", [value], ResourcePath("Web"))
| mit | -3,152,273,541,855,555,000 | 40.269841 | 128 | 0.673846 | false |
rth/PyAbel | examples/example_linbasex_hansenlaw.py | 1 | 3092 | # -*- coding: utf-8 -*-
import numpy as np
import abel
import matplotlib.pyplot as plt
IM = np.loadtxt("data/VMI_art1.txt.bz2")
legendre_orders = [0, 2, 4] # Legendre polynomial orders
proj_angles = range(0, 180, 10) # projection angles in 10 degree steps
radial_step = 1 # pixel grid
smoothing = 1 # smoothing 1/e-width for Gaussian convolution smoothing
threshold = 0.2 # threshold for normalization of higher order Newton spheres
clip=0 # clip first vectors (smallest Newton spheres) to avoid singularities
# linbasex method - center ensures image has odd square shape
# - speed and anisotropy parameters evaluated by method
LIM = abel.Transform(IM, method='linbasex', center='convolution',
center_options=dict(square=True),
transform_options=dict(basis_dir=None,
proj_angles=proj_angles, radial_step=radial_step,
smoothing=smoothing, threshold=threshold, clip=clip,
return_Beta=True, verbose=True))
# hansenlaw method - speed and anisotropy parameters evaluated by integration
HIM = abel.Transform(IM, method="hansenlaw", center='convolution',
center_options=dict(square=True),
angular_integration=True)
# alternative derivation of anisotropy parameters via integration
rrange = [(20, 50), (60, 80), (85, 100), (125, 155), (185, 205), (220, 240)]
Beta, Amp, rr, intensity, theta =\
abel.tools.vmi.radial_integration(HIM.transform, radial_ranges=rrange)
plt.figure(figsize=(12, 6))
ax0 = plt.subplot2grid((2,4), (0,0))
ax3 = plt.subplot2grid((2,4), (1,0))
ax1 = plt.subplot2grid((2,4), (0,1), colspan=2, rowspan=2)
ax2 = plt.subplot2grid((2,4), (0,3), sharex=ax1, rowspan=2)
ax0.imshow(LIM.transform, vmin=0, vmax=LIM.transform.max()*2/3)
ax0.set_aspect('equal')
ax0.axis('off')
ax0.invert_yaxis()
ax0.set_title("linbasex")
ax3.imshow(HIM.transform, vmin=0, vmax=HIM.transform[200:].max()*1/5)
ax3.axis('off')
#ax3.axis(xmin=750, xmax=850, ymin=420, ymax=620)
ax3.invert_yaxis()
ax3.set_aspect('equal')
ax3.set_title("hansenlaw")
ax1.plot(LIM.radial, LIM.Beta[0], 'r-', label='linbasex')
ax1.plot(HIM.angular_integration[1]/HIM.angular_integration[1].max(),
'b-', label='hansenlaw')
ax1.legend(loc=0, labelspacing=0.1, frameon=False, numpoints=1, fontsize=10)
ax1.set_title("Beta0 norm an={} un={} inc={} sig={} th={}".
format(proj_angles, legendre_orders, radial_step, smoothing,
threshold), fontsize=10)
ax1.axis(ymin=-0.1, ymax=1.2)
ax1.set_xlabel("radial coordinate (pixels)")
ax2.plot(LIM.radial, LIM.Beta[1], 'r-', label='linbasex')
beta = np.transpose(Beta)
ax2.errorbar(x=rr, y=beta[0], yerr=beta[1], color='b', lw=2, fmt='o',
label='hansenlaw')
ax2.set_title(r"$\beta$-parameter (Beta2 norm)", fontsize=10)
ax2.legend(loc=0, labelspacing=0.1, frameon=False, numpoints=1, fontsize=10)
ax2.axis(xmax=300, ymin=-1.0, ymax=1.0)
ax2.set_xlabel("radial coordinate (pixels)")
plt.savefig("example_linbasex_hansenlaw.png", dpi=100)
plt.show()
| mit | -238,230,129,545,215,360 | 41.944444 | 77 | 0.67044 | false |
b1-systems/kiwi | kiwi/utils/size.py | 1 | 1696 | # Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
#
import re
import math
from kiwi.exceptions import KiwiSizeError
class StringToSize(object):
"""
    **Performs size conversions from strings to numbers**
"""
@classmethod
def to_bytes(cls, size_value):
"""
        Convert the given string representing a size into the appropriate
number of bytes.
:param str size_value: a size in bytes or specified with m=MB or g=GB
:return: the number of bytes represented by size_value string
:rtype: int
"""
        size_format = r'^(\d+)([gGmM]{0,1})$'
size = re.search(size_format, size_value)
if not size:
raise KiwiSizeError(
'unsupported size format {0}, must match {1}'.format(
size_value, size_format
)
)
size_base = int(size.group(1))
size_unit = {'g': 3, 'm': 2}.get(size.group(2).lower())
return size_unit and size_base * math.pow(0x400, size_unit) or size_base
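# A short usage sketch (the 'm'/'g' branches go through math.pow and return
# floats; integer values shown for readability):
#
#     StringToSize.to_bytes('512')  # -> 512
#     StringToSize.to_bytes('2m')   # -> 2 * 1024**2
#     StringToSize.to_bytes('1G')   # -> 1024**3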
| gpl-3.0 | -5,083,597,701,060,993,000 | 33.612245 | 80 | 0.647995 | false |
umitproject/network-admin | netadmin/users/forms.py | 1 | 2410 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Adriano Monteiro Marques
#
# Author: Amit Pal <amix.pal@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.forms import AdminPasswordChangeForm
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _
import pytz
from netadmin.users.models import UserProfile
class UserForm(forms.ModelForm):
class Meta:
model = User
fields = ('first_name', 'last_name', 'email')
class UserProfileForm(forms.ModelForm):
class Meta:
model = UserProfile
fields = ('is_public', 'in_search')
class UserRegistrationForm(UserCreationForm):
email2 = forms.EmailField(label=_("E-mail"))
timezone2 = forms.ChoiceField(choices=[(x, x) for x in pytz.common_timezones], label = _("TimeZone"))
skype = forms.CharField(max_length=20)
def clean_email2(self):
email2 = self.cleaned_data['email2']
try:
user = User.objects.get(email=email2)
except User.DoesNotExist:
return email2
raise forms.ValidationError(_("Account with this e-mail address already exists."))
def save(self, commit=True):
user = super(UserRegistrationForm, self).save(commit=False)
user.email = self.cleaned_data["email2"]
user.is_active = False
if commit:
user.save()
user_profile = user.get_profile()
user_profile.timezone = self.cleaned_data["timezone2"]
user_profile.skype = self.cleaned_data["skype"]
user_profile.save()
return user
| agpl-3.0 | -4,128,243,789,339,901,000 | 34.970149 | 105 | 0.692116 | false |
lehmannro/pootle | pootle/auth/ldap_backend.py | 1 | 3490 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009 Mozilla Corporation, Zuza Software Foundation
#
# This file is part of Pootle.
#
# Pootle is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Pootle is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pootle; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import ldap
import ldap.filter
import logging
from django.conf import settings
from django.contrib.auth.models import User
class LdapBackend(object):
"""
This is a Django authentication module which implements LDAP
authentication.
To use this module, simply add it to the tuple AUTHENTICATION_BACKENDS
in settings.py.
"""
def authenticate(self, username=None, password=None):
logger = logging.getLogger('pootle.auth.ldap')
ldo = ldap.initialize(settings.AUTH_LDAP_SERVER)
ldo.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
try:
ldo.simple_bind_s(settings.AUTH_LDAP_ANON_DN, settings.AUTH_LDAP_ANON_PASS)
result = ldo.search_s(settings.AUTH_LDAP_BASE_DN, ldap.SCOPE_SUBTREE, ldap.filter.escape_filter_chars(settings.AUTH_LDAP_FILTER) % username, settings.AUTH_LDAP_FIELDS.values())
if len(result) != 1:
logger.debug("More or less than 1 matching account for (%s). Failing LDAP auth." % (username))
return None
except ldap.INVALID_CREDENTIALS:
logger.error('Anonymous bind to LDAP server failed. Please check the username and password.')
return None
except Exception, e:
logger.error('Unknown LDAP error: ' + str(e))
return None
try:
ldo.simple_bind_s(result[0][0], password)
logger.debug("Successful LDAP login for user (%s)" % (username))
try:
user = User.objects.get(username=username)
return user
except User.DoesNotExist:
logger.info("First login for LDAP user (%s). Creating new account." % username)
user = User(username=username, is_active=True)
user.password = 'LDAP_%s' % (User.objects.make_random_password(32))
for i in settings.AUTH_LDAP_FIELDS:
if i != 'dn' and len(settings.AUTH_LDAP_FIELDS[i]) > 0:
setattr(user,i,result[0][1][settings.AUTH_LDAP_FIELDS[i]][0])
user.save()
return user
except (ldap.INVALID_CREDENTIALS, ldap.UNWILLING_TO_PERFORM): # Bad e-mail or password
logger.debug("No account or bad credentials for (%s). Failing LDAP auth." % (username))
return None
except Exception, e: # No other exceptions are normal, so we raise this.
logger.error('Unknown LDAP error: ' + str(e))
raise
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
| gpl-2.0 | -7,947,188,402,403,043,000 | 38.659091 | 188 | 0.638968 | false |
westernmagic/NumPDE | series4/2d-rad-cooling/plot_on_mesh.py | 1 | 1202 | from numpy import *
from pylab import *
import matplotlib.tri
from math import atan2
vertices = loadtxt("vertices.txt")
indices = loadtxt("triangles.txt")
uImplicit = loadtxt("u_implicit.txt")
energyImplicit = loadtxt("energy_implicit.txt")
uExplicitNoCfl = loadtxt("u_explicit_no_cfl.txt")
energyExplicitNoCfl = loadtxt("energy_explicit_no_cfl.txt")
uExplicitCfl = loadtxt("u_explicit_cfl.txt")
energyExplicitCfl = loadtxt("energy_explicit_cfl.txt")
print("using %d vertices" % len(vertices))
grid = matplotlib.tri.Triangulation( vertices[:,0], vertices[:,1], indices)
tripcolor(grid, uImplicit);
title('Implicit')
colorbar()
show()
tripcolor(grid, uExplicitNoCfl);
title('Explicit (No CFL)')
colorbar()
show()
tripcolor(grid, uExplicitCfl);
title('Explicit CFL')
colorbar()
show()
tImplicit = linspace(0, 1, len(energyImplicit))
tExplicitNoCfl = linspace(0,1,len(energyExplicitNoCfl))
tExplicitCfl = linspace(0,1,len(energyExplicitCfl))
plot(tImplicit, energyImplicit, label='Implicit')
plot(tExplicitNoCfl, energyExplicitNoCfl, label='Explicit (No CFL)')
plot(tExplicitCfl, energyExplicitCfl, label='Explicit (CFL)')
ylim([-3,3])
xlabel('Time (t)')
ylabel('Energy(E)')
legend()
show()
| mit | 7,751,457,936,035,289,000 | 22.568627 | 75 | 0.748752 | false |
DiamondLightSource/diffcalc | diffcalc/ub/reflections.py | 1 | 5528 | ###
# Copyright 2008-2011 Diamond Light Source Ltd.
# This file is part of Diffcalc.
#
# Diffcalc is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Diffcalc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Diffcalc. If not, see <http://www.gnu.org/licenses/>.
###
from copy import deepcopy
import datetime # @UnusedImport for the eval below
from diffcalc.util import DiffcalcException, bold
from diffcalc.hkl.you.geometry import YouPosition
class _Reflection:
"""A reflection"""
def __init__(self, h, k, l, position, energy, tag, time):
self.h = float(h)
self.k = float(k)
self.l = float(l)
self.pos = position
self.tag = tag
        self.energy = float(energy)  # energy (keV) = 12.3984 / wavelength (Å)
self.wavelength = 12.3984 / self.energy
self.time = time # Saved as e.g. repr(datetime.now())
def __str__(self):
return ("energy=%-6.3f h=%-4.2f k=%-4.2f l=%-4.2f mu=%-8.4f "
"delta=%-8.4f nu=%-8.4f eta=%-8.4f chi=%-8.4f "
"phi=%-8.4f %-s %s" % (self.energy, self.h, self.k, self.l,
self.pos.mu, self.pos.delta, self.pos.nu, self.pos.eta,
self.pos.chi, self.pos.phi, self.tag, self.time))
class ReflectionList:
def __init__(self, geometry, externalAngleNames, reflections=None, multiplier=1):
self._geometry = geometry
self._externalAngleNames = externalAngleNames
self._reflist = reflections if reflections else []
self._multiplier = multiplier
def get_tag_index(self, idx):
_tag_list = [ref.tag for ref in self._reflist]
try:
num = _tag_list.index(idx)
except ValueError:
if isinstance(idx, int):
if idx < 1 or idx > len(self._reflist):
raise IndexError("Reflection index is out of range")
else:
num = idx - 1
else:
raise IndexError("Reflection index not found")
return num
def add_reflection(self, h, k, l, position, energy, tag, time):
"""adds a reflection, position in degrees
"""
if type(position) in (list, tuple):
try:
position = self._geometry.create_position(*position)
except AttributeError:
position = YouPosition(*position)
self._reflist += [_Reflection(h, k, l, position, energy, tag, time.__repr__())]
def edit_reflection(self, idx, h, k, l, position, energy, tag, time):
"""num starts at 1"""
try:
num = self.get_tag_index(idx)
except IndexError:
raise DiffcalcException("There is no reflection " + repr(idx)
+ " to edit.")
if type(position) in (list, tuple):
position = YouPosition(*position)
self._reflist[num] = _Reflection(h, k, l, position, energy, tag, time.__repr__())
def getReflection(self, idx):
"""
getReflection(idx) --> ( [h, k, l], position, energy, tag, time ) --
position in degrees
"""
num = self.get_tag_index(idx)
r = deepcopy(self._reflist[num]) # for convenience
return [r.h, r.k, r.l], deepcopy(r.pos), r.energy, r.tag, eval(r.time)
def get_reflection_in_external_angles(self, idx):
"""getReflection(num) --> ( [h, k, l], (angle1...angleN), energy, tag )
-- position in degrees"""
num = self.get_tag_index(idx)
r = deepcopy(self._reflist[num]) # for convenience
externalAngles = self._geometry.internal_position_to_physical_angles(r.pos)
result = [r.h, r.k, r.l], externalAngles, r.energy, r.tag, eval(r.time)
return result
def removeReflection(self, idx):
num = self.get_tag_index(idx)
del self._reflist[num]
def swap_reflections(self, idx1, idx2):
num1 = self.get_tag_index(idx1)
num2 = self.get_tag_index(idx2)
orig1 = self._reflist[num1]
self._reflist[num1] = self._reflist[num2]
self._reflist[num2] = orig1
def __len__(self):
return len(self._reflist)
def __str__(self):
return '\n'.join(self.str_lines())
def str_lines(self):
axes = tuple(s.upper() for s in self._externalAngleNames)
if not self._reflist:
return [" <<< none specified >>>"]
lines = []
format = (" %6s %5s %5s %5s " + "%8s " * len(axes) + " TAG")
values = ('ENERGY', 'H', 'K', 'L') + axes
lines.append(bold(format % values))
for n in range(1, len(self._reflist) + 1):
ref_tuple = self.get_reflection_in_external_angles(n)
[h, k, l], externalAngles, energy, tag, _ = ref_tuple
if tag is None:
tag = ""
format = (" %2d %6.3f % 4.2f % 4.2f % 4.2f " +
"% 8.4f " * len(axes) + " %s")
values = (n, energy / self._multiplier, h, k, l) + externalAngles + (tag,)
lines.append(format % values)
return lines
| gpl-3.0 | -8,610,430,123,077,464,000 | 37.388889 | 89 | 0.571274 | false |
redhatrises/classification-banner | classification-banner.py | 1 | 14507 | #!/usr/bin/python
# Classification Banner
#
# Last update was 02 July 2018
#
# Script: classification-banner.py
# Description: Displays a Classification for an Xwindows session
# Copyright: Frank Caviggia, 2017
# Author: Frank Caviggia <fcaviggia (at) gmail.com>
# Version: 1.6.6
# License: GPLv2
import sys,os,optparse,time
from socket import gethostname
# Check if DISPLAY variable is set
try:
os.environ["DISPLAY"]
import pygtk,gtk
except:
try:
import Gtk
except:
print("Error: DISPLAY environment variable not set.")
sys.exit(1)
# Global Configuration File
CONF_FILE = "/etc/classification-banner"
# Returns Username
def get_user():
try:
user = os.getlogin()
except:
user = ''
pass
return user
# Returns Hostname
def get_host():
host = gethostname()
host = host.split('.')[0]
return host
# Classification Banner Class
class Classification_Banner:
"""Class to create and refresh the actual banner."""
def __init__(self, message="UNCLASSIFIED", fgcolor="#000000",
bgcolor="#00CC00", face="liberation-sans", size="small",
weight="bold", x=0, y=0, esc=True, opacity=0.75, sys_info=False):
"""Set up and display the main window
Keyword arguments:
message -- The classification level to display
fgcolor -- Foreground color of the text to display
bgcolor -- Background color of the banner the text is against
face -- Font face to use for the displayed text
size -- Size of font to use for text
weight -- Bold or normal
hres -- Horizontal Screen Resolution (int) [ requires vres ]
vres -- Vertical Screen Resolution (int) [ requires hres ]
opacity -- Opacity of window (float) [0 .. 1, default 0.75]
"""
self.hres = x
self.vres = y
# Dynamic Resolution Scaling
self.monitor = gtk.gdk.Screen()
self.monitor.connect("size-changed", self.resize)
# Newer versions of pygtk have this method
try:
self.monitor.connect("monitors-changed", self.resize)
except:
pass
# Create Main Window
self.window = gtk.Window()
self.window.set_position(gtk.WIN_POS_CENTER)
self.window.connect("hide", self.restore)
self.window.connect("key-press-event", self.keypress)
self.window.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(bgcolor))
self.window.set_property('skip-taskbar-hint', True)
self.window.set_property('skip-pager-hint', True)
self.window.set_property('destroy-with-parent', True)
self.window.stick()
self.window.set_decorated(False)
self.window.set_keep_above(True)
self.window.set_app_paintable(True)
try:
self.window.set_opacity(opacity)
except:
pass
# Set the default window size
self.window.set_default_size(int(self.hres), 5)
# Create Main Horizontal Box to Populate
self.hbox = gtk.HBox()
# Create the Center Vertical Box
self.vbox_center = gtk.VBox()
self.center_label = gtk.Label(
"<span font_family='%s' weight='%s' foreground='%s' size='%s'>%s</span>" %
(face, weight, fgcolor, size, message))
self.center_label.set_use_markup(True)
self.center_label.set_justify(gtk.JUSTIFY_CENTER)
self.vbox_center.pack_start(self.center_label, True, True, 0)
# Create the Right-Justified Vertical Box to Populate for hostname
self.vbox_right = gtk.VBox()
self.host_label = gtk.Label(
"<span font_family='%s' weight='%s' foreground='%s' size='%s'>%s</span>" %
(face, weight, fgcolor, size, get_host()))
self.host_label.set_use_markup(True)
self.host_label.set_justify(gtk.JUSTIFY_RIGHT)
self.host_label.set_width_chars(20)
# Create the Left-Justified Vertical Box to Populate for user
self.vbox_left = gtk.VBox()
self.user_label = gtk.Label(
"<span font_family='%s' weight='%s' foreground='%s' size='%s'>%s</span>" %
(face, weight, fgcolor, size, get_user()))
self.user_label.set_use_markup(True)
self.user_label.set_justify(gtk.JUSTIFY_LEFT)
self.user_label.set_width_chars(20)
# Create the Right-Justified Vertical Box to Populate for ESC message
self.vbox_esc_right = gtk.VBox()
self.esc_label = gtk.Label(
"<span font_family='liberation-sans' weight='normal' foreground='%s' size='xx-small'> (ESC to hide temporarily) </span>" %
(fgcolor))
self.esc_label.set_use_markup(True)
self.esc_label.set_justify(gtk.JUSTIFY_RIGHT)
self.esc_label.set_width_chars(20)
# Empty Label for formatting purposes
self.vbox_empty = gtk.VBox()
self.empty_label = gtk.Label(
"<span font_family='liberation-sans' weight='normal'> </span>")
self.empty_label.set_use_markup(True)
self.empty_label.set_width_chars(20)
if not esc:
if not sys_info:
self.hbox.pack_start(self.vbox_center, True, True, 0)
else:
self.vbox_right.pack_start(self.host_label, True, True, 0)
self.vbox_left.pack_start(self.user_label, True, True, 0)
self.hbox.pack_start(self.vbox_right, False, True, 20)
self.hbox.pack_start(self.vbox_center, True, True, 0)
self.hbox.pack_start(self.vbox_left, False, True, 20)
else:
if esc and not sys_info:
self.empty_label.set_justify(gtk.JUSTIFY_LEFT)
self.vbox_empty.pack_start(self.empty_label, True, True, 0)
self.vbox_esc_right.pack_start(self.esc_label, True, True, 0)
self.hbox.pack_start(self.vbox_esc_right, False, True, 0)
self.hbox.pack_start(self.vbox_center, True, True, 0)
self.hbox.pack_start(self.vbox_empty, False, True, 0)
if sys_info:
self.vbox_right.pack_start(self.host_label, True, True, 0)
self.vbox_left.pack_start(self.user_label, True, True, 0)
self.hbox.pack_start(self.vbox_right, False, True, 20)
self.hbox.pack_start(self.vbox_center, True, True, 0)
self.hbox.pack_start(self.vbox_left, False, True, 20)
self.window.add(self.hbox)
self.window.show_all()
self.width, self.height = self.window.get_size()
# Restore Minimized Window
def restore(self, widget, data=None):
self.window.deiconify()
self.window.present()
return True
# Destroy Classification Banner Window on Resize (Display Banner Will Relaunch)
def resize(self, widget, data=None):
self.window.destroy()
return True
# Press ESC to hide window for 15 seconds
def keypress(self, widget, event=None):
if event.keyval == 65307:
if not gtk.events_pending():
self.window.iconify()
self.window.hide()
time.sleep(15)
self.window.show()
self.window.deiconify()
self.window.present()
return True
class Display_Banner:
"""Display Classification Banner Message"""
def __init__(self):
# Dynamic Resolution Scaling
self.monitor = gtk.gdk.Screen()
self.monitor.connect("size-changed", self.resize)
# Newer versions of pygtk have this method
try:
self.monitor.connect("monitors-changed", self.resize)
except:
pass
# Launch Banner
self.config, self.args = self.configure()
self.execute(self.config)
# Read Global configuration
def configure(self):
config = {}
try:
execfile(CONF_FILE, config)
except:
pass
defaults = {}
defaults["message"] = config.get("message", "UNCLASSIFIED")
defaults["fgcolor"] = config.get("fgcolor", "#FFFFFF")
defaults["bgcolor"] = config.get("bgcolor", "#007A33")
defaults["face"] = config.get("face", "liberation-sans")
defaults["size"] = config.get("size", "small")
defaults["weight"] = config.get("weight", "bold")
defaults["show_top"] = config.get("show_top", True)
defaults["show_bottom"] = config.get("show_bottom", True)
defaults["hres"] = config.get("hres", 0)
defaults["vres"] = config.get("vres", 0)
defaults["sys_info"] = config.get("sys_info", False)
defaults["opacity"] = config.get("opacity", 0.75)
defaults["esc"] = config.get("esc", True)
defaults["spanning"] = config.get("spanning", False)
# Use the global config to set defaults for command line options
parser = optparse.OptionParser()
parser.add_option("-m", "--message", default=defaults["message"],
help="Set the Classification message")
parser.add_option("-f", "--fgcolor", default=defaults["fgcolor"],
help="Set the Foreground (text) color")
parser.add_option("-b", "--bgcolor", default=defaults["bgcolor"],
help="Set the Background color")
parser.add_option("-x", "--hres", default=defaults["hres"], type="int",
help="Set the Horizontal Screen Resolution")
parser.add_option("-y", "--vres", default=defaults["vres"], type="int",
help="Set the Vertical Screen Resolution")
parser.add_option("-o", "--opacity", default=defaults["opacity"],
type="float", dest="opacity",
help="Set the window opacity for composted window managers")
parser.add_option("--face", default=defaults["face"], help="Font face")
parser.add_option("--size", default=defaults["size"], help="Font size")
parser.add_option("--weight", default=defaults["weight"],
help="Set the Font weight")
parser.add_option("--disable-esc-msg", default=defaults["esc"],
dest="esc", action="store_false",
help="Disable the 'ESC to hide' message")
parser.add_option("--hide-top", default=defaults["show_top"],
dest="show_top", action="store_false",
help="Disable the top banner")
parser.add_option("--hide-bottom", default=defaults["show_bottom"],
dest="show_bottom", action="store_false",
help="Disable the bottom banner")
parser.add_option("--system-info", default=defaults["sys_info"],
dest="sys_info", action="store_true",
help="Show user and hostname in the top banner")
parser.add_option("--enable-spanning", default=defaults["spanning"],
dest="spanning", action="store_true",
help="Enable banner(s) to span across screens as a single banner")
options, args = parser.parse_args()
return options, args
# Launch the Classification Banner Window(s)
def execute(self, options):
self.num_monitor = 0
if options.hres == 0 or options.vres == 0:
# Try Xrandr to determine primary monitor resolution
try:
self.screen = os.popen("xrandr | grep ' connected ' | awk '{ print $3 }'").readlines()[0]
self.x = self.screen.split('x')[0]
self.y = self.screen.split('x')[1].split('+')[0]
except:
try:
self.screen = os.popen("xrandr | grep ' current ' | awk '{ print $8$9$10+0 }'").readlines()[0]
self.x = self.screen.split('x')[0]
self.y = self.screen.split('x')[1].split('+')[0]
except:
self.screen = os.popen("xrandr | grep '^\*0' | awk '{ print $2$3$4 }'").readlines()[0]
self.x = self.screen.split('x')[0]
self.y = self.screen.split('x')[1].split('+')[0]
else:
                # Fall back to GTK method
self.display = gtk.gdk.display_get_default()
self.screen = self.display.get_default_screen()
self.x = self.screen.get_width()
self.y = self.screen.get_height()
else:
            # Resolution set statically
self.x = options.hres
self.y = options.vres
if not options.spanning and self.num_monitor > 1:
for monitor in range(self.num_monitor):
mon_geo = self.screen.get_monitor_geometry(monitor)
self.x_location, self.y_location, self.x, self.y = mon_geo
self.banners(options)
else:
self.x_location = 0
self.y_location = 0
self.banners(options)
def banners(self, options):
if options.show_top:
top = Classification_Banner(
options.message,
options.fgcolor,
options.bgcolor,
options.face,
options.size,
options.weight,
self.x,
self.y,
options.esc,
options.opacity,
options.sys_info)
top.window.move(self.x_location, self.y_location)
if options.show_bottom:
bottom = Classification_Banner(
options.message,
options.fgcolor,
options.bgcolor,
options.face,
options.size,
options.weight,
self.x,
self.y,
options.esc,
options.opacity)
bottom.window.move(self.x_location, int(bottom.vres))
# Relaunch the Classification Banner on Screen Resize
def resize(self, widget, data=None):
self.config, self.args = self.configure()
self.execute(self.config)
return True
# Main Program Loop
if __name__ == "__main__":
run = Display_Banner()
gtk.main()
| gpl-2.0 | 1,204,417,468,584,060,200 | 38.52861 | 136 | 0.559799 | false |
jelly/calibre | src/calibre/db/cli/cmd_show_metadata.py | 2 | 1416 | #!/usr/bin/env python2
# vim:fileencoding=utf-8
# License: GPLv3 Copyright: 2017, Kovid Goyal <kovid at kovidgoyal.net>
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import sys
from calibre import prints
from calibre.ebooks.metadata.opf2 import OPFCreator
readonly = True
version = 0 # change this if you change signature of implementation()
def implementation(db, notify_changes, book_id):
with db.safe_read_lock:
if not db.has_id(book_id):
return
return db.get_metadata(book_id)
def option_parser(get_parser, args):
parser = get_parser(
_(
'''
%prog show_metadata [options] id
Show the metadata stored in the calibre database for the book identified by id.
id is an id number from the search command.
'''
)
)
parser.add_option(
'--as-opf',
default=False,
action='store_true',
help=_('Print metadata in OPF form (XML)')
)
return parser
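# Example invocations via the calibredb wrapper (the book id 123 is
# hypothetical):
#   calibredb show_metadata 123
#   calibredb show_metadata --as-opf 123 > book.opf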
def main(opts, args, dbctx):
if len(args) < 1:
raise SystemExit(_('You must specify an id'))
book_id = int(args[0])
mi = dbctx.run('show_metadata', book_id)
if mi is None:
raise SystemExit('Id #%d is not present in database.' % id)
if opts.as_opf:
mi = OPFCreator(os.getcwdu(), mi)
mi.render(sys.stdout)
else:
prints(unicode(mi))
return 0
| gpl-3.0 | -4,527,065,528,993,158,000 | 23.842105 | 82 | 0.636299 | false |
grimoirelab/sortinghat | sortinghat/parsing/gitdm.py | 1 | 12677 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2014-2021 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Santiago Dueñas <sduenas@bitergia.com>
# Quan Zhou <quan@bitergia.com>
#
import dateutil.parser
import logging
import re
from ..db.model import MIN_PERIOD_DATE, MAX_PERIOD_DATE, UniqueIdentity,\
Identity, Enrollment, Organization, Domain
from ..exceptions import InvalidFormatError
logger = logging.getLogger(__name__)
class GitdmParser(object):
"""Parse identities and organizations using Gitdm files.
Gitdm provides several files that include information about
identities, organizations and affiliations. This parser is able
to parse anyone of these file formats, together or separate.
The unique identities are stored in an object named 'uidentities'.
The keys of this object are the UUID of the unique identities.
Each unique identity object stores a list of identities and
enrollments. Email addresses will not be validated when `email_validation`
is set to `False`.
Organizations are stored in 'organizations' object. Its keys
are the name of the organizations and each organization object is
related to a list of domains.
:param aliases: aliases stream
:param email_to_employer: enrollments stream
:param domain_to_employer: organizations stream
:param source: source of the data
:param email_validation: validate email addresses; set to True by default
:raises InvalidFormatError: raised when the format of any of the
given streams is not valid.
"""
# Common Gitdm patterns
VALID_LINE_REGEX = r"^(\S+)[ \t]+([^#\n\r\f\v]+[^#\s]*)(?:([ \t]+#.*)?|\s*)$"
LINES_TO_IGNORE_REGEX = r"^\s*(?:#.*)?\s*$"
EMAIL_ADDRESS_REGEX = r"^(?P<email>[^\s@]+@[^\s@.]+\.[^\s@]+)$"
ORGANIZATION_REGEX = r"^(?P<organization>[^#<\t\n\r\f\v]*[^#<\t\n\r\f\v\s])?$"
DOMAIN_REGEX = r"^(?P<domain>\w\S+)$"
ENROLLMENT_REGEX = r"^(?P<organization>[^#<\n\r\f\v]*[^#<\t\n\r\f\v\s])(?:[ \t]+<[ \t]+(?P<date>\d{4}\-\d{2}\-\d{2}))?$"
def __init__(self, aliases=None, email_to_employer=None,
domain_to_employer=None, source='gitdm', email_validation=True):
self._identities = {}
self._organizations = {}
self.source = source
self.email_validation = email_validation
# Raw data
self.__raw_identities = {}
self.__raw_aliases = {}
self.__raw_orgs = {}
self.__parse(aliases, email_to_employer,
domain_to_employer)
@property
def identities(self):
uids = [u for u in self._identities.values()]
uids.sort(key=lambda u: u.uuid)
return uids
@property
def organizations(self):
orgs = [o for o in self._organizations.values()]
orgs.sort(key=lambda o: o.name)
return orgs
def __parse(self, aliases, email_to_employer, domain_to_employer):
"""Parse Gitdm streams"""
self.__parse_organizations(domain_to_employer)
self.__parse_identities(aliases, email_to_employer)
def __parse_identities(self, aliases, email_to_employer):
"""Parse Gitdm identities"""
# Parse streams
self.__parse_aliases_stream(aliases)
self.__parse_email_to_employer_stream(email_to_employer)
# Create unique identities from aliases list
for alias, email in self.__raw_aliases.items():
uid = self._identities.get(email, None)
if not uid:
uid = UniqueIdentity(uuid=email)
e = re.match(self.EMAIL_ADDRESS_REGEX, email, re.UNICODE)
if e:
identity = Identity(email=email, source=self.source)
else:
identity = Identity(username=email, source=self.source)
uid.identities.append(identity)
self._identities[email] = uid
e = re.match(self.EMAIL_ADDRESS_REGEX, alias, re.UNICODE)
if e:
identity = Identity(email=alias, source=self.source)
else:
identity = Identity(username=alias, source=self.source)
uid.identities.append(identity)
# Create unique identities from enrollments list
for email in self.__raw_identities:
# Do we have it from aliases?
if email in self._identities:
uid = self._identities[email]
elif email in self.__raw_aliases:
canonical = self.__raw_aliases[email]
uid = self._identities[canonical]
else:
uid = UniqueIdentity(uuid=email)
identity = Identity(email=email, source=self.source)
uid.identities.append(identity)
self._identities[email] = uid
# Assign enrollments
enrs = self.__raw_identities[email]
enrs.sort(key=lambda r: r[1])
start_date = MIN_PERIOD_DATE
for rol in enrs:
name = rol[0]
org = self._organizations.get(name, None)
if not org:
org = Organization(name=name)
self._organizations[name] = org
end_date = rol[1]
enrollment = Enrollment(start=start_date, end=end_date,
organization=org)
uid.enrollments.append(enrollment)
if end_date != MAX_PERIOD_DATE:
start_date = end_date
def __parse_organizations(self, domain_to_employer):
"""Parse Gitdm organizations"""
# Parse streams
self.__parse_domain_to_employer_stream(domain_to_employer)
for org in self.__raw_orgs:
o = Organization(name=org)
for dom in self.__raw_orgs[org]:
d = Domain(domain=dom, is_top_domain=False)
o.domains.append(d)
self._organizations[org] = o
def __parse_aliases_stream(self, stream):
"""Parse aliases stream.
The stream contains a list of usernames (they can be email addresses
their username aliases. Each line has a username and an alias separated
by tabs. Comment lines start with the hash character (#).
Example:
# List of email aliases
jsmith@example.com jsmith@example.net
jsmith@example.net johnsmith@example.com
jdoe@example.com john_doe@example.com
jdoe@example john_doe@example.com
"""
if not stream:
return
f = self.__parse_aliases_line
for alias_entries in self.__parse_stream(stream, f):
alias = alias_entries[0]
username = alias_entries[1]
self.__raw_aliases[alias] = username
def __parse_email_to_employer_stream(self, stream):
"""Parse email to employer stream.
The stream contains a list of email addresses and their employers.
Each line has an email address and a organization name separated by
tabs. Optionally, the date when the identity withdrew from the
organization can be included followed by a '<' character. Comment
lines start with the hash character (#).
Example:
# List of enrollments
jsmith@example.com Example Company # John Smith
jdoe@example.com Example Company # John Doe
jsmith@example.com Bitergia < 2015-01-01 # John Smith - Bitergia
"""
if not stream:
return
f = self.__parse_email_to_employer_line
for rol in self.__parse_stream(stream, f):
email = rol[0]
org = rol[1]
rol_date = rol[2]
if org not in self.__raw_orgs:
self.__raw_orgs[org] = []
if email not in self.__raw_identities:
self.__raw_identities[email] = [(org, rol_date)]
else:
self.__raw_identities[email].append((org, rol_date))
def __parse_domain_to_employer_stream(self, stream):
"""Parse domain to employer stream.
Each line of the stream has to contain a domain and a organization,
or employer, separated by tabs. Comment lines start with the hash
character (#)
Example:
# Domains from domains.txt
example.org Example
example.com Example
bitergia.com Bitergia
libresoft.es LibreSoft
example.org LibreSoft
"""
if not stream:
return
f = self.__parse_domain_to_employer_line
for o in self.__parse_stream(stream, f):
org = o[0]
dom = o[1]
if org not in self.__raw_orgs:
self.__raw_orgs[org] = []
self.__raw_orgs[org].append(dom)
def __parse_stream(self, stream, parse_line):
"""Generic method to parse gitdm streams"""
if not stream:
raise InvalidFormatError(cause='stream cannot be empty or None')
nline = 0
lines = stream.split('\n')
for line in lines:
nline += 1
# Ignore blank lines and comments
m = re.match(self.LINES_TO_IGNORE_REGEX, line, re.UNICODE)
if m:
continue
m = re.match(self.VALID_LINE_REGEX, line, re.UNICODE)
if not m:
cause = "line %s: invalid format" % str(nline)
raise InvalidFormatError(cause=cause)
try:
result = parse_line(m.group(1), m.group(2))
yield result
except InvalidFormatError as e:
cause = "Skip: '%s' -> line %s: %s" % (line, str(nline), e)
logger.warning(cause)
continue
def __parse_aliases_line(self, raw_alias, raw_username):
"""Parse aliases lines"""
alias = self.__encode(raw_alias)
username = self.__encode(raw_username)
return alias, username
def __parse_email_to_employer_line(self, raw_email, raw_enrollment):
"""Parse email to employer lines"""
e = re.match(self.EMAIL_ADDRESS_REGEX, raw_email, re.UNICODE)
if not e and self.email_validation:
cause = "invalid email format: '%s'" % raw_email
raise InvalidFormatError(cause=cause)
if self.email_validation:
email = e.group('email').strip()
else:
email = raw_email
raw_enrollment = raw_enrollment.strip() if raw_enrollment != ' ' else raw_enrollment
r = re.match(self.ENROLLMENT_REGEX, raw_enrollment, re.UNICODE)
if not r:
cause = "invalid enrollment format: '%s'" % raw_enrollment
raise InvalidFormatError(cause=cause)
org = r.group('organization').strip()
date = r.group('date')
if date:
try:
dt = dateutil.parser.parse(r.group('date'))
            except Exception:
                cause = "invalid date: '%s'" % date
                raise InvalidFormatError(cause=cause)
else:
dt = MAX_PERIOD_DATE
email = self.__encode(email)
org = self.__encode(org)
return email, org, dt
def __parse_domain_to_employer_line(self, raw_domain, raw_org):
"""Parse domain to employer lines"""
d = re.match(self.DOMAIN_REGEX, raw_domain, re.UNICODE)
if not d:
cause = "invalid domain format: '%s'" % raw_domain
raise InvalidFormatError(cause=cause)
dom = d.group('domain').strip()
raw_org = raw_org.strip() if raw_org != ' ' else raw_org
o = re.match(self.ORGANIZATION_REGEX, raw_org, re.UNICODE)
if not o:
cause = "invalid organization format: '%s'" % raw_org
raise InvalidFormatError(cause=cause)
org = o.group('organization').strip()
org = self.__encode(org)
dom = self.__encode(dom)
return org, dom
def __encode(self, s):
return s if s else None
| gpl-3.0 | 2,135,665,549,278,258,700 | 32.623342 | 124 | 0.580625 | false |
karesansui/karesansui | karesansui/gadget/static.py | 1 | 1502 | # -*- coding: utf-8 -*-
#
# This file is part of Karesansui.
#
# Copyright (C) 2009-2012 HDE, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import web
from karesansui.lib.rest import Rest, auth
class Static(Rest):
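    # Serves a static asset: the URL pattern below maps param[0] to a
    # subdirectory under static/, param[1] to the file name and param[2]
    # to its extension/media type.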
def _GET(self, *param, **params):
self.__template__.dir = 'static/'+param[0]
self.__template__.file = param[1]
self.__template__.media = param[2]
return True
urls = ('/static/(.+)/(.+)\.(js|css|png|gif|jpg|jpeg|ico|jar)', Static,)
| mit | 2,057,773,710,568,235,800 | 40.722222 | 79 | 0.724368 | false |
niphlod/ssis_dash | models/menu.py | 1 | 2114 | # -*- coding: utf-8 -*-
# Copyright 2017 Niphlod <niphlod@gmail.com>
#
# This file is part of ssis_dash.
#
# ssis_dash is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# ssis_dash is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ssis_dash. If not, see <http://www.gnu.org/licenses/>.
response.logo = A("SSIS Dashboard", _class="navbar-brand")
response.title = 'SSIS monitoring'
response.subtitle = 'SSISDB monitoring'
## read more at http://dev.w3.org/html5/markup/meta.name.html
response.meta.author = 'Niphlod <niphlod@gmail.com>'
response.meta.description = 'SSISDB monitoring'
response.meta.keywords = 'SSISDB monitoring'
response.meta.generator = 'Web2py Web Framework'
response.static_version = '0.0.50'
response.static_version_urls = True
response.menu = [
('Home', False, URL('default', 'index'), []),
('Docs', False, URL('docs', 'index'), []),
]
static_files = [
'css/nprogress.css',
'css/adminlte.css',
'css/font-awesome.min.css',
'vendor/morris.css',
'css/layout.css',
'vendor/datatables/css/datatables.utils.min.css',
'js/console.js',
'js/moment.min.js',
'vendor/datatables/js/jquery.dataTables.min.js',
'vendor/datatables/js/datatables.utils.min.js',
'js/nprogress.js',
'js/jquery.pjax.js',
'js/lodash.min.js',
'vendor/raphael.min.js',
'vendor/morris.min.js',
'js/signals.min.js',
'js/uri-iri.min.js',
'js/crossroads.min.js',
'js/ractive.js',
'js/ractive-load.min.js',
'js/keymaster.min.js',
'js/marked.min.js',
'js/app.js'
]
response.files.extend([URL('static', f) for f in static_files])
PJAX_ENV = request.env.http_x_pjax
| gpl-3.0 | -8,550,410,080,521,734,000 | 31.030303 | 76 | 0.690161 | false |
russellb/nova | nova/tests/api/openstack/compute/test_api.py | 1 | 4523 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from lxml import etree
import webob.exc
import webob.dec
from webob import Request
import nova.context
from nova import test
from nova.api import openstack as openstack_api
from nova.api.openstack import wsgi
from nova.tests.api.openstack import fakes
class APITest(test.TestCase):
def _wsgi_app(self, inner_app):
# simpler version of the app than fakes.wsgi_app
return openstack_api.FaultWrapper(inner_app)
def test_malformed_json(self):
req = webob.Request.blank('/')
req.method = 'POST'
req.body = '{'
req.headers["content-type"] = "application/json"
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.status_int, 400)
def test_malformed_xml(self):
req = webob.Request.blank('/')
req.method = 'POST'
req.body = '<hi im not xml>'
req.headers["content-type"] = "application/xml"
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.status_int, 400)
def test_vendor_content_type_json(self):
ctype = 'application/vnd.openstack.compute+json'
req = webob.Request.blank('/')
req.headers['Accept'] = ctype
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.status_int, 200)
self.assertEqual(res.content_type, ctype)
body = json.loads(res.body)
def test_vendor_content_type_xml(self):
ctype = 'application/vnd.openstack.compute+xml'
req = webob.Request.blank('/')
req.headers['Accept'] = ctype
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.status_int, 200)
self.assertEqual(res.content_type, ctype)
body = etree.XML(res.body)
def test_exceptions_are_converted_to_faults_webob_exc(self):
@webob.dec.wsgify
def raise_webob_exc(req):
raise webob.exc.HTTPNotFound(explanation='Raised a webob.exc')
#api.application = raise_webob_exc
api = self._wsgi_app(raise_webob_exc)
resp = Request.blank('/').get_response(api)
self.assertEqual(resp.status_int, 404, resp.body)
def test_exceptions_are_converted_to_faults_api_fault(self):
@webob.dec.wsgify
def raise_api_fault(req):
exc = webob.exc.HTTPNotFound(explanation='Raised a webob.exc')
return wsgi.Fault(exc)
#api.application = raise_api_fault
api = self._wsgi_app(raise_api_fault)
resp = Request.blank('/').get_response(api)
self.assertTrue('itemNotFound' in resp.body, resp.body)
self.assertEqual(resp.status_int, 404, resp.body)
def test_exceptions_are_converted_to_faults_exception(self):
@webob.dec.wsgify
def fail(req):
raise Exception("Threw an exception")
#api.application = fail
api = self._wsgi_app(fail)
resp = Request.blank('/').get_response(api)
self.assertTrue('{"computeFault' in resp.body, resp.body)
self.assertEqual(resp.status_int, 500, resp.body)
def test_exceptions_are_converted_to_faults_exception_xml(self):
@webob.dec.wsgify
def fail(req):
raise Exception("Threw an exception")
#api.application = fail
api = self._wsgi_app(fail)
resp = Request.blank('/.xml').get_response(api)
self.assertTrue('<computeFault' in resp.body, resp.body)
self.assertEqual(resp.status_int, 500, resp.body)
def test_request_id_in_response(self):
req = webob.Request.blank('/')
req.method = 'GET'
context = nova.context.RequestContext('bob', 1)
context.request_id = 'test-req-id'
req.environ['nova.context'] = context
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.headers['x-compute-request-id'], 'test-req-id')
| apache-2.0 | 1,007,506,515,709,438,700 | 33.265152 | 78 | 0.646031 | false |
Heikman/picongpu | src/tools/bin/observer.py | 1 | 1286 | #!/usr/bin/env python2.7
#
# Copyright 2013 Richard Pausch
#
# This file is part of PIConGPU.
#
# PIConGPU is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PIConGPU is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PIConGPU.
# If not, see <http://www.gnu.org/licenses/>.
#
from numpy import *
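# For each of the 481 observation directions, recover the (theta, phi)
# grid indices from the linear index, map them onto a half sphere
# (theta in [pi/2, pi], phi in [0, pi]) and print the Cartesian unit
# vector of that observation direction.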
for angle_id_extern in arange(481):
N_phi_split = 32
N_theta = 16
my_index_theta = angle_id_extern / N_phi_split
my_index_phi = angle_id_extern % N_phi_split
phi_range = pi
theta_range = pi/2.0
delta_phi = phi_range / (N_phi_split - 1)
delta_theta = theta_range / (N_theta - 1)
theta = my_index_theta * delta_theta + 0.5*pi
phi = my_index_phi * delta_phi
x = sin(theta)*cos(phi)
y = sin(theta)*sin(phi)
z = cos(theta)
print around([x, y, z], 3)
| gpl-3.0 | 3,014,636,239,808,558,600 | 25.791667 | 70 | 0.664075 | false |
kubeflow/kfserving | python/kfserving/test/test_knative_url.py | 1 | 1378 | # Copyright 2020 kubeflow.org.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
KFServing
Python SDK for KFServing # noqa: E501
OpenAPI spec version: v0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import kfserving
from kfserving.models.knative_url import KnativeURL # noqa: E501
from kfserving.rest import ApiException
class TestKnativeURL(unittest.TestCase):
"""KnativeURL unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testKnativeURL(self):
"""Test KnativeURL"""
# FIXME: construct object with mandatory attributes with example values
# model = kfserving.models.knative_url.KnativeURL() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 3,694,224,059,007,468,500 | 24.518519 | 79 | 0.703919 | false |