code | repo_name | path | language | license | size
---|---|---|---|---|---|
__copyright__ = 'Copyright(c) Gordon Elliott 2017'
"""
"""
from a_tuin.in_out.gsheet_integration import get_gsheet_fields, load_class
from a_tuin.metadata import StringField, Mapping
from glod.db.fund import FundRestriction, Fund
from glod.db.account import AccountQuery
FUND_RESTRICTION_MAP = {
'01. unrestricted': FundRestriction.Unrestricted,
'02. restricted': FundRestriction.Restricted,
'03. endowment': FundRestriction.Endowment,
}
def conform_fund_restriction(value, _):
return FUND_RESTRICTION_MAP.get(value.lower(), FundRestriction.Unrestricted)
def conform_yes_no(value, _):
return value.lower() == 'yes'
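# For example (illustrative): conform_fund_restriction('02. Restricted', None)
# returns FundRestriction.Restricted, unknown labels fall back to
# FundRestriction.Unrestricted, and conform_yes_no('Yes', None) returns True.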
def funds_from_gsheet(session, extract_from_detailed_ledger):
fund_gsheet = get_gsheet_fields(
Fund,
{
'name': 'fund',
'restriction': 'type',
'is parish fund': 'parish fund',
'is realised': 'realised',
'account': 'bank account id'
}
)
fund_gsheet['restriction'] = StringField('restriction')
fund_gsheet['parish fund'] = StringField('parish fund')
fund_gsheet['realised'] = StringField('realised')
fund_gsheet['bank account id'] = StringField('bank account id')
field_casts = {
'type': conform_fund_restriction,
'parish fund': conform_yes_no,
'realised': conform_yes_no,
'bank account id': AccountQuery(session).instance_finder('reference_no', int)
}
fund_mapping = Mapping(fund_gsheet, Fund.constructor_parameters, field_casts=field_casts)
funds = extract_from_detailed_ledger(
'funds',
'A11',
('fund', 'type', 'parish fund', 'realised', 'bank account id')
)
load_class(session, funds, fund_mapping, Fund)
| gordon-elliott/glod | src/glod/in_out/fund.py | Python | mit | 1,742 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# quadcopter documentation build configuration file, created by
# sphinx-quickstart on Fri Nov 27 18:40:16 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
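# (Illustrative note, not from the original file: extensions would be enabled
# here, e.g. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo'].)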
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'quadcopter'
copyright = '2015, Chris Laws'
author = 'Chris Laws'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.1'
# The full version, including alpha/beta/rc tags.
release = '0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'quadcopterdoc'
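# (Illustrative note, not part of the generated file: with this configuration
# the HTML docs would typically be built by running
# `sphinx-build -b html . _build/html` from the doc/ directory.)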
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'quadcopter.tex', 'quadcopter Documentation',
'Chris Laws', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'quadcopter', 'quadcopter Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'quadcopter', 'quadcopter Documentation',
author, 'quadcopter', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| claws/quadcopter | doc/conf.py | Python | mit | 9,210 |
# -*- coding: utf-8 -*-
#
# The MIT License (MIT)
# Copyright (c) 2014, Niko Usai <mogui83@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software
# and associated documentation files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import zipfile
import plistlib
from .bplist import BPlistReader
class IPAparser(object):
"""docstring for IPAparser"""
def __init__(self, path):
self.path = path
self.zip_obj = None
def findFile(self, fileToFind):
for filepath in self.zip_obj.namelist():
fname = filepath.split('/')[-1]
if fname == fileToFind:
return filepath
return None
def findInfoPlist(self):
for filepath in self.zip_obj.namelist():
parts = filepath.split('/')
if len(parts) > 2 and parts[2] == 'Info.plist':
return filepath
return None
    def saveFileTo(self, zipfilepath, newfilename):
        raw = self.zip_obj.read(zipfilepath)
        f = open(newfilename, "wb")  # binary mode: archive members are raw bytes
        f.write(raw)
        f.close()
def parseInfo(self):
self.zip_obj = zipfile.ZipFile(self.path, 'r')
InfoPath = self.findInfoPlist()
if not InfoPath:
raise Exception("Info.plist not found")
raw = self.zip_obj.read(InfoPath)
try:
InfoPlist = plistlib.readPlistFromString(raw)
        except Exception:  # not an XML plist; fall back to binary plist parsing
InfoPlist = BPlistReader.plistWithString(raw)
        return InfoPlist
| mogui/pyipa | pyipa/IPAparser.py | Python | mit | 2,263 |
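# Illustrative usage sketch for pyipa's IPAparser above (not from the original
# file; 'MyApp.ipa' is a hypothetical path):
#
#     parser = IPAparser('MyApp.ipa')
#     info = parser.parseInfo()
#     print(info['CFBundleIdentifier'], info['CFBundleVersion'])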
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import numpy as np
import re
import math
import itertools
import collections
import warnings
from monty.json import MSONable
from pymatgen.core.periodic_table import get_el_sp, Specie
from pymatgen.core.structure import Structure
from pymatgen.core.lattice import Lattice
from pymatgen.electronic_structure.core import Spin, Orbital
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
from pymatgen.util.coord import pbc_diff
"""
This module provides classes to define everything related to band structures.
"""
__author__ = "Geoffroy Hautier, Shyue Ping Ong, Michael Kocher"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Geoffroy Hautier"
__email__ = "geoffroy@uclouvain.be"
__status__ = "Development"
__date__ = "March 14, 2012"
class Kpoint(MSONable):
"""
    Class to store kpoint objects. A kpoint is defined by a lattice and frac
    or cartesian coordinates, with a syntax similar to the site object in
    pymatgen.core.structure.
Args:
coords: coordinate of the kpoint as a numpy array
lattice: A pymatgen.core.lattice.Lattice lattice object representing
the reciprocal lattice of the kpoint
to_unit_cell: Translates fractional coordinate to the basic unit
cell, i.e., all fractional coordinates satisfy 0 <= a < 1.
Defaults to False.
coords_are_cartesian: Boolean indicating if the coordinates given are
in cartesian or fractional coordinates (by default fractional)
label: the label of the kpoint if any (None by default)
"""
def __init__(self, coords, lattice, to_unit_cell=False,
coords_are_cartesian=False, label=None):
self._lattice = lattice
self._fcoords = lattice.get_fractional_coords(coords) \
if coords_are_cartesian else coords
self._label = label
if to_unit_cell:
for i in range(len(self._fcoords)):
self._fcoords[i] -= math.floor(self._fcoords[i])
self._ccoords = lattice.get_cartesian_coords(self._fcoords)
@property
def lattice(self):
"""
The lattice associated with the kpoint. It's a
pymatgen.core.lattice.Lattice object
"""
return self._lattice
@property
def label(self):
"""
The label associated with the kpoint
"""
return self._label
@property
def frac_coords(self):
"""
The fractional coordinates of the kpoint as a numpy array
"""
return np.copy(self._fcoords)
@property
def cart_coords(self):
"""
The cartesian coordinates of the kpoint as a numpy array
"""
return np.copy(self._ccoords)
@property
def a(self):
"""
Fractional a coordinate of the kpoint
"""
return self._fcoords[0]
@property
def b(self):
"""
Fractional b coordinate of the kpoint
"""
return self._fcoords[1]
@property
def c(self):
"""
Fractional c coordinate of the kpoint
"""
return self._fcoords[2]
def __str__(self):
"""
Returns a string with fractional, cartesian coordinates and label
"""
return "{} {} {}".format(self.frac_coords, self.cart_coords,
self.label)
def as_dict(self):
"""
Json-serializable dict representation of a kpoint
"""
return {"lattice": self.lattice.as_dict(),
"fcoords": list(self.frac_coords),
"ccoords": list(self.cart_coords), "label": self.label,
"@module": self.__class__.__module__,
"@class": self.__class__.__name__}
class BandStructure:
"""
This is the most generic band structure data possible
it's defined by a list of kpoints + energies for each of them
.. attribute:: kpoints:
the list of kpoints (as Kpoint objects) in the band structure
.. attribute:: lattice_rec
the reciprocal lattice of the band structure.
.. attribute:: efermi
the fermi energy
.. attribute:: is_spin_polarized
True if the band structure is spin-polarized, False otherwise
.. attribute:: bands
The energy eigenvalues as a {spin: ndarray}. Note that the use of an
ndarray is necessary for computational as well as memory efficiency
due to the large amount of numerical data. The indices of the ndarray
are [band_index, kpoint_index].
.. attribute:: nb_bands
returns the number of bands in the band structure
.. attribute:: structure
returns the structure
.. attribute:: projections
The projections as a {spin: ndarray}. Note that the use of an
ndarray is necessary for computational as well as memory efficiency
due to the large amount of numerical data. The indices of the ndarray
are [band_index, kpoint_index, orbital_index, ion_index].
Args:
kpoints: list of kpoint as numpy arrays, in frac_coords of the
given lattice by default
eigenvals: dict of energies for spin up and spin down
{Spin.up:[][],Spin.down:[][]}, the first index of the array
[][] refers to the band and the second to the index of the
kpoint. The kpoints are ordered according to the order of the
kpoints array. If the band structure is not spin polarized, we
only store one data set under Spin.up
lattice: The reciprocal lattice as a pymatgen Lattice object.
Pymatgen uses the physics convention of reciprocal lattice vectors
WITH a 2*pi coefficient
efermi: fermi energy
        labels_dict: (dict) mapping a kpoint (in frac coords or cartesian
            coordinates, depending on coords_are_cartesian) to a label.
coords_are_cartesian: Whether coordinates are cartesian.
structure: The crystal structure (as a pymatgen Structure object)
associated with the band structure. This is needed if we
provide projections to the band structure
        projections: dict of orbital projections as {spin: ndarray}. The
            indices of the ndarray are [band_index, kpoint_index, orbital_index,
            ion_index]. If the band structure is not spin polarized, we only
            store one data set under Spin.up.
"""
def __init__(self, kpoints, eigenvals, lattice, efermi, labels_dict=None,
coords_are_cartesian=False, structure=None, projections=None):
self.efermi = efermi
self.lattice_rec = lattice
self.kpoints = []
self.labels_dict = {}
self.structure = structure
self.projections = projections or {}
self.projections = {k: np.array(v) for k, v in self.projections.items()}
if labels_dict is None:
labels_dict = {}
        if len(self.projections) != 0 and self.structure is None:
            raise Exception("if projections are provided, a structure object"
                            " must also be given")
for k in kpoints:
# let see if this kpoint has been assigned a label
label = None
for c in labels_dict:
if np.linalg.norm(k - np.array(labels_dict[c])) < 0.0001:
label = c
self.labels_dict[label] = Kpoint(
k, lattice, label=label,
coords_are_cartesian=coords_are_cartesian)
self.kpoints.append(
Kpoint(k, lattice, label=label,
coords_are_cartesian=coords_are_cartesian))
self.bands = {spin: np.array(v) for spin, v in eigenvals.items()}
self.nb_bands = len(eigenvals[Spin.up])
self.is_spin_polarized = len(self.bands) == 2
def get_projection_on_elements(self):
"""
Method returning a dictionary of projections on elements.
Returns:
a dictionary in the {Spin.up:[][{Element:values}],
Spin.down:[][{Element:values}]} format
            if there are no projections in the band structure,
            returns an empty dict
"""
result = {}
structure = self.structure
for spin, v in self.projections.items():
result[spin] = [[collections.defaultdict(float)
for i in range(len(self.kpoints))]
for j in range(self.nb_bands)]
for i, j, k in itertools.product(range(self.nb_bands),
range(len(self.kpoints)),
range(structure.num_sites)):
result[spin][i][j][str(structure[k].specie)] += np.sum(
v[i, j, :, k])
return result
def get_projections_on_elements_and_orbitals(self, el_orb_spec):
"""
Method returning a dictionary of projections on elements and specific
orbitals
Args:
el_orb_spec: A dictionary of Elements and Orbitals for which we want
to have projections on. It is given as: {Element:[orbitals]},
e.g., {'Cu':['d','s']}
Returns:
A dictionary of projections on elements in the
{Spin.up:[][{Element:{orb:values}}],
Spin.down:[][{Element:{orb:values}}]} format
            if there are no projections in the band structure, returns an
            empty dict.
"""
result = {}
structure = self.structure
el_orb_spec = {get_el_sp(el): orbs for el, orbs in el_orb_spec.items()}
for spin, v in self.projections.items():
result[spin] = [[{str(e): collections.defaultdict(float)
for e in el_orb_spec}
for i in range(len(self.kpoints))]
for j in range(self.nb_bands)]
for i, j, k in itertools.product(
range(self.nb_bands), range(len(self.kpoints)),
range(structure.num_sites)):
sp = structure[k].specie
for orb_i in range(len(v[i][j])):
o = Orbital(orb_i).name[0]
if sp in el_orb_spec:
if o in el_orb_spec[sp]:
result[spin][i][j][str(sp)][o] += v[i][j][
orb_i][k]
return result
def is_metal(self, efermi_tol=1e-4):
"""
Check if the band structure indicates a metal by looking if the fermi
level crosses a band.
Returns:
True if a metal, False if not
"""
for spin, values in self.bands.items():
for i in range(self.nb_bands):
if np.any(values[i, :] - self.efermi < -efermi_tol) and \
np.any(values[i, :] - self.efermi > efermi_tol):
return True
return False
def get_vbm(self):
"""
Returns data about the VBM.
Returns:
dict as {"band_index","kpoint_index","kpoint","energy"}
- "band_index": A dict with spin keys pointing to a list of the
indices of the band containing the VBM (please note that you
can have several bands sharing the VBM) {Spin.up:[],
Spin.down:[]}
- "kpoint_index": The list of indices in self.kpoints for the
kpoint vbm. Please note that there can be several
kpoint_indices relating to the same kpoint (e.g., Gamma can
occur at different spots in the band structure line plot)
- "kpoint": The kpoint (as a kpoint object)
- "energy": The energy of the VBM
- "projections": The projections along sites and orbitals of the
VBM if any projection data is available (else it is an empty
              dictionary). The format is similar to the projections field in
BandStructure: {spin:{'Orbital': [proj]}} where the array
[proj] is ordered according to the sites in structure
"""
if self.is_metal():
return {"band_index": [], "kpoint_index": [],
"kpoint": [], "energy": None, "projections": {}}
max_tmp = -float("inf")
index = None
kpointvbm = None
for spin, v in self.bands.items():
for i, j in zip(*np.where(v < self.efermi)):
if v[i, j] > max_tmp:
max_tmp = float(v[i, j])
index = j
kpointvbm = self.kpoints[j]
list_ind_kpts = []
if kpointvbm.label is not None:
for i in range(len(self.kpoints)):
if self.kpoints[i].label == kpointvbm.label:
list_ind_kpts.append(i)
else:
list_ind_kpts.append(index)
# get all other bands sharing the vbm
list_ind_band = collections.defaultdict(list)
for spin in self.bands:
for i in range(self.nb_bands):
if math.fabs(self.bands[spin][i][index] - max_tmp) < 0.001:
list_ind_band[spin].append(i)
proj = {}
for spin, v in self.projections.items():
if len(list_ind_band[spin]) == 0:
continue
proj[spin] = v[list_ind_band[spin][0]][list_ind_kpts[0]]
return {'band_index': list_ind_band,
'kpoint_index': list_ind_kpts,
'kpoint': kpointvbm, 'energy': max_tmp,
'projections': proj}
def get_cbm(self):
"""
Returns data about the CBM.
Returns:
{"band_index","kpoint_index","kpoint","energy"}
- "band_index": A dict with spin keys pointing to a list of the
indices of the band containing the VBM (please note that you
can have several bands sharing the VBM) {Spin.up:[],
Spin.down:[]}
- "kpoint_index": The list of indices in self.kpoints for the
kpoint vbm. Please note that there can be several
kpoint_indices relating to the same kpoint (e.g., Gamma can
occur at different spots in the band structure line plot)
- "kpoint": The kpoint (as a kpoint object)
- "energy": The energy of the VBM
- "projections": The projections along sites and orbitals of the
VBM if any projection data is available (else it is an empty
dictionnary). The format is similar to the projections field in
BandStructure: {spin:{'Orbital': [proj]}} where the array
[proj] is ordered according to the sites in structure
"""
if self.is_metal():
return {"band_index": [], "kpoint_index": [],
"kpoint": [], "energy": None, "projections": {}}
max_tmp = float("inf")
index = None
kpointcbm = None
for spin, v in self.bands.items():
for i, j in zip(*np.where(v >= self.efermi)):
if v[i, j] < max_tmp:
max_tmp = float(v[i, j])
index = j
kpointcbm = self.kpoints[j]
list_index_kpoints = []
if kpointcbm.label is not None:
for i in range(len(self.kpoints)):
if self.kpoints[i].label == kpointcbm.label:
list_index_kpoints.append(i)
else:
list_index_kpoints.append(index)
# get all other bands sharing the cbm
list_index_band = collections.defaultdict(list)
for spin in self.bands:
for i in range(self.nb_bands):
if math.fabs(self.bands[spin][i][index] - max_tmp) < 0.001:
list_index_band[spin].append(i)
proj = {}
for spin, v in self.projections.items():
if len(list_index_band[spin]) == 0:
continue
proj[spin] = v[list_index_band[spin][0]][list_index_kpoints[0]]
return {'band_index': list_index_band,
'kpoint_index': list_index_kpoints,
'kpoint': kpointcbm, 'energy': max_tmp,
'projections': proj}
def get_band_gap(self):
"""
Returns band gap data.
Returns:
A dict {"energy","direct","transition"}:
"energy": band gap energy
"direct": A boolean telling if the gap is direct or not
"transition": kpoint labels of the transition (e.g., "\\Gamma-X")
"""
if self.is_metal():
return {"energy": 0.0, "direct": False, "transition": None}
cbm = self.get_cbm()
vbm = self.get_vbm()
result = dict(direct=False, energy=0.0, transition=None)
result["energy"] = cbm["energy"] - vbm["energy"]
if (cbm["kpoint"].label is not None and cbm["kpoint"].label == vbm[
"kpoint"].label) \
or np.linalg.norm(cbm["kpoint"].cart_coords
- vbm["kpoint"].cart_coords) < 0.01:
result["direct"] = True
result["transition"] = "-".join(
[str(c.label) if c.label is not None else
str("(") + ",".join(["{0:.3f}".format(c.frac_coords[i])
for i in range(3)])
+ str(")") for c in [vbm["kpoint"], cbm["kpoint"]]])
return result
def get_direct_band_gap_dict(self):
"""
Returns a dictionary of information about the direct
band gap
Returns:
a dictionary of the band gaps indexed by spin
along with their band indices and k-point index
"""
if self.is_metal():
raise ValueError("get_direct_band_gap_dict should"
"only be used with non-metals")
direct_gap_dict = {}
for spin, v in self.bands.items():
above = v[np.all(v > self.efermi, axis=1)]
min_above = np.min(above, axis=0)
below = v[np.all(v < self.efermi, axis=1)]
max_below = np.max(below, axis=0)
diff = min_above - max_below
kpoint_index = np.argmin(diff)
band_indices = [np.argmax(below[:, kpoint_index]),
np.argmin(above[:, kpoint_index]) + len(below)]
direct_gap_dict[spin] = {"value": diff[kpoint_index],
"kpoint_index": kpoint_index,
"band_indices": band_indices}
return direct_gap_dict
def get_direct_band_gap(self):
"""
Returns the direct band gap.
Returns:
the value of the direct band gap
"""
if self.is_metal():
return 0.0
dg = self.get_direct_band_gap_dict()
return min(v['value'] for v in dg.values())
def get_sym_eq_kpoints(self, kpoint, cartesian=False, tol=1e-2):
"""
Returns a list of unique symmetrically equivalent k-points.
Args:
kpoint (1x3 array): coordinate of the k-point
cartesian (bool): kpoint is in cartesian or fractional coordinates
tol (float): tolerance below which coordinates are considered equal
Returns:
([1x3 array] or None): if structure is not available returns None
"""
if not self.structure:
return None
sg = SpacegroupAnalyzer(self.structure)
symmops = sg.get_point_group_operations(cartesian=cartesian)
points = np.dot(kpoint, [m.rotation_matrix for m in symmops])
rm_list = []
# identify and remove duplicates from the list of equivalent k-points:
for i in range(len(points) - 1):
for j in range(i + 1, len(points)):
if np.allclose(pbc_diff(points[i], points[j]), [0, 0, 0], tol):
rm_list.append(i)
break
return np.delete(points, rm_list, axis=0)
def get_kpoint_degeneracy(self, kpoint, cartesian=False, tol=1e-2):
"""
Returns degeneracy of a given k-point based on structure symmetry
Args:
kpoint (1x3 array): coordinate of the k-point
cartesian (bool): kpoint is in cartesian or fractional coordinates
tol (float): tolerance below which coordinates are considered equal
Returns:
(int or None): degeneracy or None if structure is not available
"""
all_kpts = self.get_sym_eq_kpoints(kpoint, cartesian, tol=tol)
if all_kpts is not None:
return len(all_kpts)
def as_dict(self):
"""
        Json-serializable dict representation of BandStructure.
"""
d = {"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"lattice_rec": self.lattice_rec.as_dict(), "efermi": self.efermi,
"kpoints": []}
        # kpoints are not kpoint object dicts but frac coords (this makes
        # the dict smaller and avoids the repetition of the lattice)
for k in self.kpoints:
d["kpoints"].append(k.as_dict()["fcoords"])
d["bands"] = {str(int(spin)): self.bands[spin]
for spin in self.bands}
d["is_metal"] = self.is_metal()
vbm = self.get_vbm()
d["vbm"] = {"energy": vbm["energy"],
"kpoint_index": vbm["kpoint_index"],
"band_index": {str(int(spin)): vbm["band_index"][spin]
for spin in vbm["band_index"]},
'projections': {str(spin): v.tolist() for spin, v in vbm[
'projections'].items()}}
cbm = self.get_cbm()
d['cbm'] = {'energy': cbm['energy'],
'kpoint_index': cbm['kpoint_index'],
'band_index': {str(int(spin)): cbm['band_index'][spin]
for spin in cbm['band_index']},
'projections': {str(spin): v.tolist() for spin, v in cbm[
'projections'].items()}}
d['band_gap'] = self.get_band_gap()
d['labels_dict'] = {}
d['is_spin_polarized'] = self.is_spin_polarized
for c in self.labels_dict:
d['labels_dict'][c] = self.labels_dict[c].as_dict()['fcoords']
d['projections'] = {}
if len(self.projections) != 0:
d['structure'] = self.structure.as_dict()
d['projections'] = {str(int(spin)): np.array(v).tolist()
for spin, v in self.projections.items()}
return d
@classmethod
def from_dict(cls, d):
"""
Create from dict.
        Args:
            d (dict): A dict with all data for a band structure object.
Returns:
A BandStructure object
"""
labels_dict = d['labels_dict']
projections = {}
structure = None
if isinstance(list(d['bands'].values())[0], dict):
eigenvals = {Spin(int(k)): np.array(d['bands'][k]['data'])
for k in d['bands']}
else:
eigenvals = {Spin(int(k)): d['bands'][k] for k in d['bands']}
if 'structure' in d:
structure = Structure.from_dict(d['structure'])
if d.get('projections'):
projections = {Spin(int(spin)): np.array(v)
for spin, v in d["projections"].items()}
return BandStructure(
d['kpoints'], eigenvals,
Lattice(d['lattice_rec']['matrix']), d['efermi'],
labels_dict, structure=structure, projections=projections)
@classmethod
def from_old_dict(cls, d):
"""
Args:
d (dict): A dict with all data for a band structure symm line
object.
Returns:
A BandStructureSymmLine object
"""
# Strip the label to recover initial string (see trick used in as_dict to handle $ chars)
labels_dict = {k.strip(): v for k, v in d['labels_dict'].items()}
projections = {}
structure = None
if 'projections' in d and len(d['projections']) != 0:
structure = Structure.from_dict(d['structure'])
projections = {}
for spin in d['projections']:
dd = []
for i in range(len(d['projections'][spin])):
ddd = []
for j in range(len(d['projections'][spin][i])):
dddd = []
for k in range(len(d['projections'][spin][i][j])):
ddddd = []
orb = Orbital(k).name
for l in range(len(d['projections'][spin][i][j][
orb])):
ddddd.append(d['projections'][spin][i][j][
orb][l])
dddd.append(np.array(ddddd))
ddd.append(np.array(dddd))
dd.append(np.array(ddd))
projections[Spin(int(spin))] = np.array(dd)
return BandStructure(
d['kpoints'], {Spin(int(k)): d['bands'][k] for k in d['bands']},
Lattice(d['lattice_rec']['matrix']), d['efermi'],
labels_dict, structure=structure, projections=projections)
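def _example_band_gap():
    # Illustrative sketch appended for this dump (not part of the original
    # pymatgen module): a tiny two-band, three-kpoint band structure with a
    # direct gap at the third kpoint.
    rec_lattice = Lattice.cubic(1.0)  # hypothetical reciprocal lattice
    kpts = [[0.0, 0.0, 0.0], [0.25, 0.0, 0.0], [0.5, 0.0, 0.0]]
    eigenvals = {Spin.up: np.array([[-1.0, -0.9, -0.8],    # valence band
                                    [1.0, 0.8, 0.6]])}     # conduction band
    bs = BandStructure(kpts, eigenvals, rec_lattice, efermi=0.0)
    assert not bs.is_metal()
    return bs.get_band_gap()  # {'energy': ~1.4, 'direct': True, ...}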
class BandStructureSymmLine(BandStructure, MSONable):
"""
This object stores band structures along selected (symmetry) lines in the
Brillouin zone. We call the different symmetry lines (ex: \\Gamma to Z)
"branches".
Args:
kpoints: list of kpoint as numpy arrays, in frac_coords of the
given lattice by default
eigenvals: dict of energies for spin up and spin down
{Spin.up:[][],Spin.down:[][]}, the first index of the array
[][] refers to the band and the second to the index of the
kpoint. The kpoints are ordered according to the order of the
kpoints array. If the band structure is not spin polarized, we
only store one data set under Spin.up.
lattice: The reciprocal lattice.
Pymatgen uses the physics convention of reciprocal lattice vectors
WITH a 2*pi coefficient
efermi: fermi energy
        labels_dict: (dict) mapping a kpoint (in frac coords or cartesian
            coordinates, depending on coords_are_cartesian) to a label.
coords_are_cartesian: Whether coordinates are cartesian.
structure: The crystal structure (as a pymatgen Structure object)
associated with the band structure. This is needed if we
provide projections to the band structure.
        projections: dict of orbital projections as {spin: ndarray}. The
            indices of the ndarray are [band_index, kpoint_index, orbital_index,
            ion_index]. If the band structure is not spin polarized, we only
            store one data set under Spin.up.
"""
def __init__(self, kpoints, eigenvals, lattice, efermi, labels_dict,
coords_are_cartesian=False, structure=None,
projections=None):
super().__init__(
kpoints, eigenvals, lattice, efermi, labels_dict,
coords_are_cartesian, structure, projections)
self.distance = []
self.branches = []
one_group = []
branches_tmp = []
# get labels and distance for each kpoint
previous_kpoint = self.kpoints[0]
previous_distance = 0.0
previous_label = self.kpoints[0].label
for i in range(len(self.kpoints)):
label = self.kpoints[i].label
if label is not None and previous_label is not None:
self.distance.append(previous_distance)
else:
self.distance.append(
np.linalg.norm(self.kpoints[i].cart_coords -
previous_kpoint.cart_coords) +
previous_distance)
previous_kpoint = self.kpoints[i]
previous_distance = self.distance[i]
if label:
if previous_label:
if len(one_group) != 0:
branches_tmp.append(one_group)
one_group = []
previous_label = label
one_group.append(i)
if len(one_group) != 0:
branches_tmp.append(one_group)
for b in branches_tmp:
self.branches.append(
{"start_index": b[0], "end_index": b[-1],
"name": str(self.kpoints[b[0]].label) + "-" +
str(self.kpoints[b[-1]].label)})
self.is_spin_polarized = False
if len(self.bands) == 2:
self.is_spin_polarized = True
def get_equivalent_kpoints(self, index):
"""
Returns the list of kpoint indices equivalent (meaning they are the
same frac coords) to the given one.
Args:
index: the kpoint index
Returns:
a list of equivalent indices
TODO: now it uses the label we might want to use coordinates instead
(in case there was a mislabel)
"""
        # if the kpoint has no label it can't have a repetition along the band
        # structure line object
if self.kpoints[index].label is None:
return [index]
list_index_kpoints = []
for i in range(len(self.kpoints)):
if self.kpoints[i].label == self.kpoints[index].label:
list_index_kpoints.append(i)
return list_index_kpoints
def get_branch(self, index):
"""
Returns in what branch(es) is the kpoint. There can be several
branches.
Args:
index: the kpoint index
Returns:
A list of dictionaries [{"name","start_index","end_index","index"}]
indicating all branches in which the k_point is. It takes into
account the fact that one kpoint (e.g., \\Gamma) can be in several
branches
"""
to_return = []
for i in self.get_equivalent_kpoints(index):
for b in self.branches:
if b["start_index"] <= i <= b["end_index"]:
to_return.append({"name": b["name"],
"start_index": b["start_index"],
"end_index": b["end_index"],
"index": i})
return to_return
def apply_scissor(self, new_band_gap):
"""
Apply a scissor operator (shift of the CBM) to fit the given band gap.
        If it's a metal, we look for the band crossing the Fermi level
        and shift it up. This will not work all the time for metals!
        Args:
            new_band_gap: the band gap the scissored band structure needs to have.
Returns:
a BandStructureSymmLine object with the applied scissor shift
"""
if self.is_metal():
# moves then the highest index band crossing the fermi level
# find this band...
max_index = -1000
# spin_index = None
for i in range(self.nb_bands):
below = False
above = False
for j in range(len(self.kpoints)):
if self.bands[Spin.up][i][j] < self.efermi:
below = True
if self.bands[Spin.up][i][j] > self.efermi:
above = True
if above and below:
if i > max_index:
max_index = i
# spin_index = Spin.up
if self.is_spin_polarized:
below = False
above = False
for j in range(len(self.kpoints)):
if self.bands[Spin.down][i][j] < self.efermi:
below = True
if self.bands[Spin.down][i][j] > self.efermi:
above = True
if above and below:
if i > max_index:
max_index = i
# spin_index = Spin.down
old_dict = self.as_dict()
shift = new_band_gap
for spin in old_dict['bands']:
for k in range(len(old_dict['bands'][spin])):
for v in range(len(old_dict['bands'][spin][k])):
if k >= max_index:
old_dict['bands'][spin][k][v] = \
old_dict['bands'][spin][k][v] + shift
else:
shift = new_band_gap - self.get_band_gap()['energy']
old_dict = self.as_dict()
for spin in old_dict['bands']:
for k in range(len(old_dict['bands'][spin])):
for v in range(len(old_dict['bands'][spin][k])):
if old_dict['bands'][spin][k][v] >= \
old_dict['cbm']['energy']:
old_dict['bands'][spin][k][v] = \
old_dict['bands'][spin][k][v] + shift
old_dict['efermi'] = old_dict['efermi'] + shift
return BandStructureSymmLine.from_dict(old_dict)
def as_dict(self):
"""
Json-serializable dict representation of BandStructureSymmLine.
"""
d = {"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"lattice_rec": self.lattice_rec.as_dict(), "efermi": self.efermi,
"kpoints": []}
        # kpoints are not kpoint object dicts but frac coords (this makes
        # the dict smaller and avoids the repetition of the lattice)
for k in self.kpoints:
d["kpoints"].append(k.as_dict()["fcoords"])
d["branches"] = self.branches
d["bands"] = {str(int(spin)): self.bands[spin].tolist()
for spin in self.bands}
d["is_metal"] = self.is_metal()
vbm = self.get_vbm()
d["vbm"] = {"energy": vbm["energy"],
"kpoint_index": vbm["kpoint_index"],
"band_index": {str(int(spin)): vbm["band_index"][spin]
for spin in vbm["band_index"]},
'projections': {str(spin): v.tolist() for spin, v in vbm[
'projections'].items()}}
cbm = self.get_cbm()
d['cbm'] = {'energy': cbm['energy'],
'kpoint_index': cbm['kpoint_index'],
'band_index': {str(int(spin)): cbm['band_index'][spin]
for spin in cbm['band_index']},
'projections': {str(spin): v.tolist() for spin, v in cbm[
'projections'].items()}}
d['band_gap'] = self.get_band_gap()
d['labels_dict'] = {}
d['is_spin_polarized'] = self.is_spin_polarized
        # MongoDB does not accept keys starting with $. Add a blank space to fix the problem.
for c in self.labels_dict:
mongo_key = c if not c.startswith("$") else " " + c
d['labels_dict'][mongo_key] = self.labels_dict[c].as_dict()[
'fcoords']
if len(self.projections) != 0:
d['structure'] = self.structure.as_dict()
d['projections'] = {str(int(spin)): np.array(v).tolist()
for spin, v in self.projections.items()}
return d
@classmethod
def from_dict(cls, d):
"""
Args:
d (dict): A dict with all data for a band structure symm line
object.
Returns:
A BandStructureSymmLine object
"""
try:
# Strip the label to recover initial string (see trick used in as_dict to handle $ chars)
labels_dict = {k.strip(): v for k, v in d['labels_dict'].items()}
projections = {}
structure = None
if d.get('projections'):
if isinstance(d["projections"]['1'][0][0], dict):
raise ValueError("Old band structure dict format detected!")
structure = Structure.from_dict(d['structure'])
projections = {Spin(int(spin)): np.array(v)
for spin, v in d["projections"].items()}
return BandStructureSymmLine(
d['kpoints'], {Spin(int(k)): d['bands'][k]
for k in d['bands']},
Lattice(d['lattice_rec']['matrix']), d['efermi'],
labels_dict, structure=structure, projections=projections)
except:
warnings.warn("Trying from_dict failed. Now we are trying the old "
"format. Please convert your BS dicts to the new "
"format. The old format will be retired in pymatgen "
"5.0.")
return BandStructureSymmLine.from_old_dict(d)
@classmethod
def from_old_dict(cls, d):
"""
Args:
d (dict): A dict with all data for a band structure symm line
object.
Returns:
A BandStructureSymmLine object
"""
# Strip the label to recover initial string (see trick used in as_dict to handle $ chars)
labels_dict = {k.strip(): v for k, v in d['labels_dict'].items()}
projections = {}
structure = None
if 'projections' in d and len(d['projections']) != 0:
structure = Structure.from_dict(d['structure'])
projections = {}
for spin in d['projections']:
dd = []
for i in range(len(d['projections'][spin])):
ddd = []
for j in range(len(d['projections'][spin][i])):
dddd = []
for k in range(len(d['projections'][spin][i][j])):
ddddd = []
orb = Orbital(k).name
for l in range(len(d['projections'][spin][i][j][
orb])):
ddddd.append(d['projections'][spin][i][j][
orb][l])
dddd.append(np.array(ddddd))
ddd.append(np.array(dddd))
dd.append(np.array(ddd))
projections[Spin(int(spin))] = np.array(dd)
return BandStructureSymmLine(
d['kpoints'], {Spin(int(k)): d['bands'][k]
for k in d['bands']},
Lattice(d['lattice_rec']['matrix']), d['efermi'],
labels_dict, structure=structure, projections=projections)
class LobsterBandStructureSymmLine(BandStructureSymmLine):
def apply_scissor(self, new_band_gap):
"""
Apply a scissor operator (shift of the CBM) to fit the given band gap.
        If it's a metal, we look for the band crossing the Fermi level
        and shift it up. This will not work all the time for metals!
        Args:
            new_band_gap: the band gap the scissored band structure needs to have.
Returns:
a BandStructureSymmLine object with the applied scissor shift
"""
if self.is_metal():
# moves then the highest index band crossing the fermi level
# find this band...
max_index = -1000
# spin_index = None
for i in range(self.nb_bands):
below = False
above = False
for j in range(len(self.kpoints)):
if self.bands[Spin.up][i][j] < self.efermi:
below = True
if self.bands[Spin.up][i][j] > self.efermi:
above = True
if above and below:
if i > max_index:
max_index = i
# spin_index = Spin.up
if self.is_spin_polarized:
below = False
above = False
for j in range(len(self.kpoints)):
if self.bands[Spin.down][i][j] < self.efermi:
below = True
if self.bands[Spin.down][i][j] > self.efermi:
above = True
if above and below:
if i > max_index:
max_index = i
# spin_index = Spin.down
old_dict = self.as_dict()
shift = new_band_gap
for spin in old_dict['bands']:
for k in range(len(old_dict['bands'][spin])):
for v in range(len(old_dict['bands'][spin][k])):
if k >= max_index:
old_dict['bands'][spin][k][v] = \
old_dict['bands'][spin][k][v] + shift
else:
shift = new_band_gap - self.get_band_gap()['energy']
old_dict = self.as_dict()
for spin in old_dict['bands']:
for k in range(len(old_dict['bands'][spin])):
for v in range(len(old_dict['bands'][spin][k])):
if old_dict['bands'][spin][k][v] >= \
old_dict['cbm']['energy']:
old_dict['bands'][spin][k][v] = \
old_dict['bands'][spin][k][v] + shift
old_dict['efermi'] = old_dict['efermi'] + shift
return LobsterBandStructureSymmLine.from_dict(old_dict)
def as_dict(self):
"""
Json-serializable dict representation of BandStructureSymmLine.
"""
d = {"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"lattice_rec": self.lattice_rec.as_dict(), "efermi": self.efermi,
"kpoints": []}
        # kpoints are not kpoint object dicts but frac coords (this makes
        # the dict smaller and avoids the repetition of the lattice)
for k in self.kpoints:
d["kpoints"].append(k.as_dict()["fcoords"])
d["branches"] = self.branches
d["bands"] = {str(int(spin)): self.bands[spin].tolist()
for spin in self.bands}
d["is_metal"] = self.is_metal()
vbm = self.get_vbm()
d["vbm"] = {"energy": vbm["energy"],
"kpoint_index": [int(x) for x in vbm["kpoint_index"]],
"band_index": {str(int(spin)): vbm["band_index"][spin]
for spin in vbm["band_index"]},
'projections': {str(spin): v for spin, v in vbm[
'projections'].items()}}
cbm = self.get_cbm()
d['cbm'] = {'energy': cbm['energy'],
'kpoint_index': [int(x) for x in cbm["kpoint_index"]],
'band_index': {str(int(spin)): cbm['band_index'][spin]
for spin in cbm['band_index']},
'projections': {str(spin): v for spin, v in cbm[
'projections'].items()}}
d['band_gap'] = self.get_band_gap()
d['labels_dict'] = {}
d['is_spin_polarized'] = self.is_spin_polarized
        # MongoDB does not accept keys starting with $. Add a blank space to fix the problem.
for c in self.labels_dict:
mongo_key = c if not c.startswith("$") else " " + c
d['labels_dict'][mongo_key] = self.labels_dict[c].as_dict()[
'fcoords']
if len(self.projections) != 0:
d['structure'] = self.structure.as_dict()
d['projections'] = {str(int(spin)): np.array(v).tolist()
for spin, v in self.projections.items()}
return d
@classmethod
def from_dict(cls, d):
"""
Args:
d (dict): A dict with all data for a band structure symm line
object.
Returns:
A BandStructureSymmLine object
"""
try:
# Strip the label to recover initial string (see trick used in as_dict to handle $ chars)
labels_dict = {k.strip(): v for k, v in d['labels_dict'].items()}
projections = {}
structure = None
if d.get('projections'):
if isinstance(d["projections"]['1'][0][0], dict):
raise ValueError("Old band structure dict format detected!")
structure = Structure.from_dict(d['structure'])
projections = {Spin(int(spin)): np.array(v)
for spin, v in d["projections"].items()}
return LobsterBandStructureSymmLine(
d['kpoints'], {Spin(int(k)): d['bands'][k]
for k in d['bands']},
Lattice(d['lattice_rec']['matrix']), d['efermi'],
labels_dict, structure=structure, projections=projections)
except:
warnings.warn("Trying from_dict failed. Now we are trying the old "
"format. Please convert your BS dicts to the new "
"format. The old format will be retired in pymatgen "
"5.0.")
return LobsterBandStructureSymmLine.from_old_dict(d)
@classmethod
def from_old_dict(cls, d):
"""
Args:
d (dict): A dict with all data for a band structure symm line
object.
Returns:
A BandStructureSymmLine object
"""
# Strip the label to recover initial string (see trick used in as_dict to handle $ chars)
labels_dict = {k.strip(): v for k, v in d['labels_dict'].items()}
projections = {}
structure = None
if 'projections' in d and len(d['projections']) != 0:
structure = Structure.from_dict(d['structure'])
projections = {}
for spin in d['projections']:
dd = []
for i in range(len(d['projections'][spin])):
ddd = []
for j in range(len(d['projections'][spin][i])):
dddd = []
ddd.append(d['projections'][spin][i][j])
dd.append(np.array(ddd))
projections[Spin(int(spin))] = np.array(dd)
return LobsterBandStructureSymmLine(
d['kpoints'], {Spin(int(k)): d['bands'][k]
for k in d['bands']},
Lattice(d['lattice_rec']['matrix']), d['efermi'],
labels_dict, structure=structure, projections=projections)
def get_projection_on_elements(self):
"""
Method returning a dictionary of projections on elements.
It sums over all available orbitals for each element.
Returns:
a dictionary in the {Spin.up:[][{Element:values}],
Spin.down:[][{Element:values}]} format
            if there are no projections in the band structure,
            returns an empty dict
"""
result = {}
structure = self.structure
for spin, v in self.projections.items():
result[spin] = [[collections.defaultdict(float)
for i in range(len(self.kpoints))]
for j in range(self.nb_bands)]
for i, j in itertools.product(range(self.nb_bands),
range(len(self.kpoints))):
for key, item in v[i][j].items():
for key2, item2 in item.items():
specie = str(Specie(re.split(r"[0-9]+", key)[0]))
result[spin][i][j][specie] += item2
return result
def get_projections_on_elements_and_orbitals(self, el_orb_spec):
"""
Method returning a dictionary of projections on elements and specific
orbitals
Args:
el_orb_spec: A dictionary of Elements and Orbitals for which we want
to have projections on. It is given as: {Element:[orbitals]},
e.g., {'Si':['3s','3p']} or {'Si':['3s','3p_x', '3p_y', '3p_z']} depending on input files
Returns:
A dictionary of projections on elements in the
{Spin.up:[][{Element:{orb:values}}],
Spin.down:[][{Element:{orb:values}}]} format
            if there are no projections in the band structure, returns an
            empty dict.
"""
result = {}
structure = self.structure
el_orb_spec = {get_el_sp(el): orbs for el, orbs in el_orb_spec.items()}
for spin, v in self.projections.items():
result[spin] = [[{str(e): collections.defaultdict(float)
for e in el_orb_spec}
for i in range(len(self.kpoints))]
for j in range(self.nb_bands)]
for i, j in itertools.product(range(self.nb_bands),
range(len(self.kpoints))):
for key, item in v[i][j].items():
for key2, item2 in item.items():
specie = str(Specie(re.split(r"[0-9]+", key)[0]))
if get_el_sp(str(specie)) in el_orb_spec:
if key2 in el_orb_spec[get_el_sp(str(specie))]:
result[spin][i][j][specie][key2] += item2
return result
def get_reconstructed_band_structure(list_bs, efermi=None):
"""
This method takes a list of band structures and reconstructs
one band structure object from all of them.
    This is typically very useful when you split non-self-consistent
    band structure runs into several independent jobs and want to merge the
    results back together.
Args:
list_bs: A list of BandStructure or BandStructureSymmLine objects.
efermi: The Fermi energy of the reconstructed band structure. If
None is assigned an average of all the Fermi energy in each
object in the list_bs is used.
Returns:
A BandStructure or BandStructureSymmLine object (depending on
the type of the list_bs objects)
"""
if efermi is None:
efermi = sum([b.efermi for b in list_bs]) / len(list_bs)
kpoints = []
labels_dict = {}
rec_lattice = list_bs[0].lattice_rec
nb_bands = min([list_bs[i].nb_bands for i in range(len(list_bs))])
kpoints = np.concatenate([[k.frac_coords for k in bs.kpoints]
for bs in list_bs])
dicts = [bs.labels_dict for bs in list_bs]
labels_dict = {k: v.frac_coords for d in dicts for k, v in d.items()}
eigenvals = {}
eigenvals[Spin.up] = np.concatenate([bs.bands[Spin.up][:nb_bands]
for bs in list_bs], axis=1)
if list_bs[0].is_spin_polarized:
eigenvals[Spin.down] = np.concatenate([bs.bands[Spin.down][:nb_bands]
for bs in list_bs], axis=1)
projections = {}
if len(list_bs[0].projections) != 0:
projs = [bs.projections[Spin.up][:nb_bands] for bs in list_bs]
projections[Spin.up] = np.concatenate(projs, axis=1)
if list_bs[0].is_spin_polarized:
projs = [bs.projections[Spin.down][:nb_bands] for bs in list_bs]
projections[Spin.down] = np.concatenate(projs, axis=1)
if isinstance(list_bs[0], BandStructureSymmLine):
return BandStructureSymmLine(kpoints, eigenvals, rec_lattice,
efermi, labels_dict,
structure=list_bs[0].structure,
projections=projections)
else:
return BandStructure(kpoints, eigenvals, rec_lattice, efermi,
labels_dict, structure=list_bs[0].structure,
projections=projections)
| dongsenfo/pymatgen | pymatgen/electronic_structure/bandstructure.py | Python | mit | 52,567 |
import csv
import sys
import matplotlib.pyplot as plt
fig = plt.figure()
ax = fig.add_subplot(111)
def parse(file_name):
"Parses the data sets from the csv file we are given to work with"
try:
file = open(file_name)
except IOError:
print "Failed to open the data file"
sys.exit()
rawFile = csv.reader(file) # Reading the csv file into a raw form
rawData = list(rawFile) # Converting the raw data into list from.
file.close()
return rawData
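# Expected CSV layout, inferred from main() below (illustrative):
#   row 1: the two axis labels, e.g. "cost,value"
#   row 2: the frontier size, e.g. "3"
#   then one "x,y" row per point, with the frontier points listed first.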
def toXandY(unorderedData):
"This method converts seperates x and y co-ordinates for plotting"
orderedData = []
orderedData.append([]) # Add a new sublist every time
orderedData.append([]) # Add a new sublist every time
listSize = len(unorderedData)
for x in range(0, listSize):
        orderedData[0].append(unorderedData[x][0])  # Separates the x-coords
for y in range(0, listSize):
        orderedData[1].append(unorderedData[y][1])  # Separates the y-coords
return orderedData
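# For example (illustrative): toXandY([[1, 2], [3, 4]]) returns [[1, 3], [2, 4]].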
def main():
newData = []
f_line_x = []
f_line_y = []
file_name = "data.csv"
    data = parse(file_name)  # Calling the parse function we made
    labels = data.pop(0)  # Necessary evil: the first row holds the axis labels
frontier_size = int(data.pop(0)[0])
list_size = len(data)
for i in range(0, list_size): # Converting the string list to float
        newData.append([])  # Add a new sublist every time
for j in range(0, 2): # Append converted data to the new list
newData[i].append(float(data[i][j]))
DataXandY = toXandY(newData) # DataXandY -> [[Xs][Ys]]
i = 0
while i < frontier_size:
i+=1
f_line_x.append(DataXandY[0].pop(0))
f_line_y.append(DataXandY[1].pop(0))
plt.xlabel(labels[0])
plt.ylabel(labels[1])
plt.title("Pareto dominance")
plt.plot(DataXandY[0], DataXandY[1], "o", color="g") # Plot all points
plt.plot(f_line_x, f_line_y, "-o", color="r") # Plot frontier line
plt.gca().set_aspect('equal', adjustable='box')
plt.show()
if __name__ == "__main__":
main()
| GZakharov1525/SOFE3770 | Assignment2/plot_lines.py | Python | gpl-3.0 | 2,119 |
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test fee estimation code
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
# Construct 2 trivial P2SH's and the ScriptSigs that spend them
# So we can create many many transactions without needing to spend
# time signing.
P2SH_1 = "2MySexEGVzZpRgNQ1JdjdP5bRETznm3roQ2" # P2SH of "OP_1 OP_DROP"
P2SH_2 = "2NBdpwq8Aoo1EEKEXPNrKvr5xQr3M9UfcZA" # P2SH of "OP_2 OP_DROP"
# Associated ScriptSig's to spend satisfy P2SH_1 and P2SH_2
# 4 bytes of OP_TRUE and push 2-byte redeem script of "OP_1 OP_DROP" or "OP_2 OP_DROP"
SCRIPT_SIG = ["0451025175", "0451025275"]
def small_txpuzzle_randfee(from_node, conflist, unconflist, amount, min_fee, fee_increment):
'''
Create and send a transaction with a random fee.
    The transaction pays to a trivial P2SH script, and assumes that its inputs
are of the same form.
The function takes a list of confirmed outputs and unconfirmed outputs
and attempts to use the confirmed list first for its inputs.
It adds the newly created outputs to the unconfirmed list.
Returns (raw transaction, fee)
'''
# It's best to exponentially distribute our random fees
# because the buckets are exponentially spaced.
# Exponentially distributed from 1-128 * fee_increment
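    # (1.1892 ≈ 2**0.25, so 28 quarter-steps of the exponent span 2**7 = 128.)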
rand_fee = float(fee_increment)*(1.1892**random.randint(0,28))
# Total fee ranges from min_fee to min_fee + 127*fee_increment
fee = min_fee - fee_increment + satoshi_round(rand_fee)
inputs = []
total_in = Decimal("0.00000000")
while total_in <= (amount + fee) and len(conflist) > 0:
t = conflist.pop(0)
total_in += t["amount"]
inputs.append({ "txid" : t["txid"], "vout" : t["vout"]} )
if total_in <= amount + fee:
while total_in <= (amount + fee) and len(unconflist) > 0:
t = unconflist.pop(0)
total_in += t["amount"]
inputs.append({ "txid" : t["txid"], "vout" : t["vout"]} )
if total_in <= amount + fee:
raise RuntimeError("Insufficient funds: need %d, have %d"%(amount+fee, total_in))
outputs = {}
outputs[P2SH_1] = total_in - amount - fee
outputs[P2SH_2] = amount
rawtx = from_node.createrawtransaction(inputs, outputs)
    # createrawtransaction constructs a transaction that is ready to be signed.
# These transactions don't need to be signed, but we still have to insert the ScriptSig
# that will satisfy the ScriptPubKey.
completetx = rawtx[0:10]
inputnum = 0
for inp in inputs:
completetx += rawtx[10+82*inputnum:82+82*inputnum]
completetx += SCRIPT_SIG[inp["vout"]]
completetx += rawtx[84+82*inputnum:92+82*inputnum]
inputnum += 1
completetx += rawtx[10+82*inputnum:]
txid = from_node.sendrawtransaction(completetx, True)
unconflist.append({ "txid" : txid, "vout" : 0 , "amount" : total_in - amount - fee})
unconflist.append({ "txid" : txid, "vout" : 1 , "amount" : amount})
return (completetx, fee)
def split_inputs(from_node, txins, txouts, initial_split = False):
'''
We need to generate a lot of very small inputs so we can generate a ton of transactions
and they will have low priority.
This function takes an input from txins, and creates and sends a transaction
which splits the value into 2 outputs which are appended to txouts.
'''
prevtxout = txins.pop()
inputs = []
outputs = {}
inputs.append({ "txid" : prevtxout["txid"], "vout" : prevtxout["vout"] })
half_change = satoshi_round(prevtxout["amount"]/2)
rem_change = prevtxout["amount"] - half_change - Decimal("0.00001000")
outputs[P2SH_1] = half_change
outputs[P2SH_2] = rem_change
rawtx = from_node.createrawtransaction(inputs, outputs)
# If this is the initial split we actually need to sign the transaction
    # Otherwise we just need to insert the proper ScriptSig
if (initial_split) :
completetx = from_node.signrawtransaction(rawtx)["hex"]
else :
completetx = rawtx[0:82] + SCRIPT_SIG[prevtxout["vout"]] + rawtx[84:]
txid = from_node.sendrawtransaction(completetx, True)
txouts.append({ "txid" : txid, "vout" : 0 , "amount" : half_change})
txouts.append({ "txid" : txid, "vout" : 1 , "amount" : rem_change})
def check_estimates(node, fees_seen, max_invalid, print_estimates = True):
'''
This function calls estimatefee and verifies that the estimates
meet certain invariants.
'''
all_estimates = [ node.estimatefee(i) for i in range(1,26) ]
if print_estimates:
print([str(all_estimates[e-1]) for e in [1,2,3,6,15,25]])
delta = 1.0e-6 # account for rounding error
last_e = max(fees_seen)
for e in [x for x in all_estimates if x >= 0]:
# Estimates should be within the bounds of what transactions fees actually were:
if float(e)+delta < min(fees_seen) or float(e)-delta > max(fees_seen):
raise AssertionError("Estimated fee (%f) out of range (%f,%f)"
%(float(e), min(fees_seen), max(fees_seen)))
# Estimates should be monotonically decreasing
if float(e)-delta > last_e:
raise AssertionError("Estimated fee (%f) larger than last fee (%f) for lower number of confirms"
%(float(e),float(last_e)))
last_e = e
valid_estimate = False
invalid_estimates = 0
for i,e in enumerate(all_estimates): # estimate is for i+1
if e >= 0:
valid_estimate = True
# estimatesmartfee should return the same result
assert_equal(node.estimatesmartfee(i+1)["feerate"], e)
else:
invalid_estimates += 1
# estimatesmartfee should still be valid
approx_estimate = node.estimatesmartfee(i+1)["feerate"]
answer_found = node.estimatesmartfee(i+1)["blocks"]
assert(approx_estimate > 0)
assert(answer_found > i+1)
    # Once we're at a high enough confirmation count that we can give an estimate,
    # we should have estimates for all higher confirmation counts
if valid_estimate:
raise AssertionError("Invalid estimate appears at higher confirm count than valid estimate")
# Check on the expected number of different confirmation counts
# that we might not have valid estimates for
if invalid_estimates > max_invalid:
raise AssertionError("More than (%d) invalid estimates"%(max_invalid))
return all_estimates
class EstimateFeeTest(BitcoinTestFramework):
def setup_network(self):
'''
We'll setup the network to have 3 nodes that all mine with different parameters.
But first we need to use one node to create a lot of small low priority outputs
which we will use to generate our transactions.
'''
self.nodes = []
# Use node0 to mine blocks for input splitting
self.nodes.append(start_node(0, self.options.tmpdir, ["-maxorphantx=1000",
"-relaypriority=0", "-whitelist=127.0.0.1"]))
print("This test is time consuming, please be patient")
print("Splitting inputs to small size so we can generate low priority tx's")
self.txouts = []
self.txouts2 = []
# Split a coinbase into two transaction puzzle outputs
split_inputs(self.nodes[0], self.nodes[0].listunspent(0), self.txouts, True)
# Mine
while (len(self.nodes[0].getrawmempool()) > 0):
self.nodes[0].generate(1)
# Repeatedly split those 2 outputs, doubling twice for each rep
# Use txouts to monitor the available utxo, since these won't be tracked in wallet
reps = 0
while (reps < 5):
#Double txouts to txouts2
while (len(self.txouts)>0):
split_inputs(self.nodes[0], self.txouts, self.txouts2)
while (len(self.nodes[0].getrawmempool()) > 0):
self.nodes[0].generate(1)
#Double txouts2 to txouts
while (len(self.txouts2)>0):
split_inputs(self.nodes[0], self.txouts2, self.txouts)
while (len(self.nodes[0].getrawmempool()) > 0):
self.nodes[0].generate(1)
reps += 1
print("Finished splitting")
        # Now we can connect the other nodes; we didn't want to connect them earlier
        # so that the estimates would not be affected by the splitting transactions
        # Node1 mines blocks that are small, but still bigger than the expected
        # transaction rate, and allows free transactions.
        # NOTE: the CreateNewBlock code starts counting block size at 1,000 bytes
        # (17k is room enough for 110 or so transactions)
self.nodes.append(start_node(1, self.options.tmpdir,
["-blockprioritysize=1500", "-blockmaxsize=17000",
"-maxorphantx=1000", "-relaypriority=0", "-debug=estimatefee"]))
connect_nodes(self.nodes[1], 0)
# Node2 is a stingy miner, that
# produces too small blocks (room for only 55 or so transactions)
node2args = ["-blockprioritysize=0", "-blockmaxsize=8000", "-maxorphantx=1000", "-relaypriority=0"]
self.nodes.append(start_node(2, self.options.tmpdir, node2args))
connect_nodes(self.nodes[0], 2)
connect_nodes(self.nodes[2], 1)
self.is_network_split = False
self.sync_all()
def transact_and_mine(self, numblocks, mining_node):
min_fee = Decimal("0.00001")
# We will now mine numblocks blocks generating on average 100 transactions between each block
# We shuffle our confirmed txout set before each set of transactions
        # small_txpuzzle_randfee will use the transactions that have inputs already in the chain when possible,
        # resorting to tx's that depend on the mempool when those run out
for i in range(numblocks):
random.shuffle(self.confutxo)
for j in range(random.randrange(100-50,100+50)):
from_index = random.randint(1,2)
(txhex, fee) = small_txpuzzle_randfee(self.nodes[from_index], self.confutxo,
self.memutxo, Decimal("0.005"), min_fee, min_fee)
tx_kbytes = (len(txhex) // 2) / 1000.0
self.fees_per_kb.append(float(fee)/tx_kbytes)
sync_mempools(self.nodes[0:3],.1)
mined = mining_node.getblock(mining_node.generate(1)[0],True)["tx"]
sync_blocks(self.nodes[0:3],.1)
#update which txouts are confirmed
newmem = []
for utx in self.memutxo:
if utx["txid"] in mined:
self.confutxo.append(utx)
else:
newmem.append(utx)
self.memutxo = newmem
def run_test(self):
self.fees_per_kb = []
self.memutxo = []
self.confutxo = self.txouts # Start with the set of confirmed txouts after splitting
print("Will output estimates for 1/2/3/6/15/25 blocks")
for i in xrange(2):
print("Creating transactions and mining them with a block size that can't keep up")
# Create transactions and mine 10 small blocks with node 2, but create txs faster than we can mine
self.transact_and_mine(10, self.nodes[2])
check_estimates(self.nodes[1], self.fees_per_kb, 14)
print("Creating transactions and mining them at a block size that is just big enough")
# Generate transactions while mining 10 more blocks, this time with node1
# which mines blocks with capacity just above the rate that transactions are being created
self.transact_and_mine(10, self.nodes[1])
check_estimates(self.nodes[1], self.fees_per_kb, 2)
# Finish by mining a normal-sized block:
while len(self.nodes[1].getrawmempool()) > 0:
self.nodes[1].generate(1)
sync_blocks(self.nodes[0:3],.1)
print("Final estimates after emptying mempools")
check_estimates(self.nodes[1], self.fees_per_kb, 2)
if __name__ == '__main__':
EstimateFeeTest().main()
| Kangmo/bitcoin | qa/rpc-tests/smartfees.py | Python | mit | 12,523 |
import copy
import operator
empty = object()
def unpickle_lazyobject(wrapped):
"""
Used to unpickle lazy objects. Just return its argument, which will be the
wrapped object.
"""
return wrapped
def new_method_proxy(func):
def inner(self, *args):
if self._wrapped is empty:
self._setup()
return func(self._wrapped, *args)
return inner
class LazyObject(object):
"""
A wrapper for another class that can be used to delay instantiation of the
wrapped class.
By subclassing, you have the opportunity to intercept and alter the
instantiation. If you don't need to do that, use SimpleLazyObject.
"""
# Avoid infinite recursion when tracing __init__ (#19456).
_wrapped = None
def __init__(self):
self._wrapped = empty
__getattr__ = new_method_proxy(getattr)
def __setattr__(self, name, value):
if name == "_wrapped":
# Assign to __dict__ to avoid infinite __setattr__ loops.
self.__dict__["_wrapped"] = value
else:
if self._wrapped is empty:
self._setup()
setattr(self._wrapped, name, value)
def __delattr__(self, name):
if name == "_wrapped":
raise TypeError("can't delete _wrapped.")
if self._wrapped is empty:
self._setup()
delattr(self._wrapped, name)
def _setup(self):
"""
Must be implemented by subclasses to initialize the wrapped object.
"""
raise NotImplementedError('subclasses of LazyObject must provide a _setup() method')
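    # A minimal sketch of a concrete subclass (hypothetical, not part of this
    # module); the wrapped object is only built on first access:
    #
    #     class LazyList(LazyObject):
    #         def _setup(self):
    #             self._wrapped = [1, 2, 3]  # stands in for an expensive build
    #
    #     lazy = LazyList()   # nothing constructed yet
    #     len(lazy)           # triggers _setup() via the __len__ proxy -> 3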
# Because we have messed with __class__ below, we confuse pickle as to what
# class we are pickling. We're going to have to initialize the wrapped
# object to successfully pickle it, so we might as well just pickle the
# wrapped object since they're supposed to act the same way.
#
# Unfortunately, if we try to simply act like the wrapped object, the ruse
# will break down when pickle gets our id(). Thus we end up with pickle
# thinking, in effect, that we are a distinct object from the wrapped
# object, but with the same __dict__. This can cause problems (see #25389).
#
# So instead, we define our own __reduce__ method and custom unpickler. We
# pickle the wrapped object as the unpickler's argument, so that pickle
# will pickle it normally, and then the unpickler simply returns its
# argument.
def __reduce__(self):
if self._wrapped is empty:
self._setup()
return (unpickle_lazyobject, (self._wrapped,))
# We have to explicitly override __getstate__ so that older versions of
# pickle don't try to pickle the __dict__ (which in the case of a
# SimpleLazyObject may contain a lambda). The value will end up being
# ignored by our __reduce__ and custom unpickler.
def __getstate__(self):
return {}
def __deepcopy__(self, memo):
if self._wrapped is empty:
# We have to use type(self), not self.__class__, because the
# latter is proxied.
result = type(self)()
memo[id(self)] = result
return result
return copy.deepcopy(self._wrapped, memo)
__bytes__ = new_method_proxy(bytes)
__str__ = new_method_proxy(str)
__bool__ = new_method_proxy(bool)
# Introspection support
__dir__ = new_method_proxy(dir)
# Need to pretend to be the wrapped class, for the sake of objects that
# care about this (especially in equality tests)
__class__ = property(new_method_proxy(operator.attrgetter("__class__")))
__eq__ = new_method_proxy(operator.eq)
__ne__ = new_method_proxy(operator.ne)
__hash__ = new_method_proxy(hash)
# List/Tuple/Dictionary methods support
__getitem__ = new_method_proxy(operator.getitem)
__setitem__ = new_method_proxy(operator.setitem)
__delitem__ = new_method_proxy(operator.delitem)
__iter__ = new_method_proxy(iter)
__len__ = new_method_proxy(len)
    __contains__ = new_method_proxy(operator.contains)
| avara1986/gozokia | gozokia/utils/functional.py | Python | mit | 4,105
"""
@file basic_rilsacorridor3.py
@author Daniel Krajzewicz
@date 2014-09-01
@version $Id: basic_rilsacorridor3.py 22608 2017-01-17 06:28:54Z behrisch $
SUMO, Simulation of Urban MObility; see http://sumo.dlr.de/
Copyright (C) 2012-2017 DLR (http://www.dlr.de/) and contributors
This file is part of SUMO.
SUMO is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
"""
from __future__ import absolute_import
from __future__ import print_function
from . import *
import os
import math
import sumolib.net.generator.grid as netGenerator
import sumolib.net.generator.demand as demandGenerator
from sumolib.net.generator.network import *
flowsRiLSA1 = [
["nmp", [
["ms", 359, 9],
["me", 59, 9],
["mw", 64, 12]
]],
["wmp", [
["me", 508, 10],
["mn", 80, 14],
["ms", 130, 2]
]],
["emp", [
["mw", 571, 10],
["mn", 57, 9],
["ms", 47, 3]
]],
["smp", [
["mn", 354, 2],
["me", 49, 2],
["mw", 92, 2]
]]
]
class Scenario_BasicRiLSACorridor3(Scenario):
NAME = "BasicRiLSACorridor3"
THIS_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), NAME)
TLS_FILE = "tls.add.xml"
NET_FILE = "network.net.xml"
def __init__(self, name, params, withDefaultDemand=True):
Scenario.__init__(self, self.THIS_DIR)
self.params = params
self.demandName = self.fullPath("routes.rou.xml")
self.netName = self.fullPath(self.NET_FILE)
# network
if fileNeedsRebuild(self.fullPath(self.NET_FILE), "netconvert"):
print("Network in '%s' needs to be rebuild" % self.netName)
# , Lane(dirs="l", disallowed="pedestrian")]
lanes = [Lane(dirs="s", allowed="pedestrian"), Lane(
dirs="rs", disallowed="pedestrian")]
defaultEdge = Edge(numLanes=2, maxSpeed=13.89, lanes=lanes)
defaultEdge.addSplit(100, 0, 1)
defaultEdge.lanes[-1].dirs = "l"
netGen = netGenerator.grid(5, 3, None, defaultEdge)
for n in netGen._nodes:
nid = n.split("/")
nid[0] = int(nid[0])
nid[1] = int(nid[1])
if nid[0] > 0 and nid[0] < 6 and nid[1] > 0 and nid[1] < 4:
netGen._nodes[n].addCrossing(
"%s/%s_to_%s.-100" % (nid[0] - 1, nid[1], n), "%s_to_%s/%s" % (n, nid[0] - 1, nid[1]))
netGen._nodes[n].addCrossing(
"%s/%s_to_%s.-100" % (nid[0] + 1, nid[1], n), "%s_to_%s/%s" % (n, nid[0] + 1, nid[1]))
netGen._nodes[n].addCrossing(
"%s/%s_to_%s.-100" % (nid[0], nid[1] - 1, n), "%s_to_%s/%s" % (n, nid[0], nid[1] - 1))
netGen._nodes[n].addCrossing(
"%s/%s_to_%s.-100" % (nid[0], nid[1] + 1, n), "%s_to_%s/%s" % (n, nid[0], nid[1] + 1))
# not nice, the network name should be given/returned
netGen.build(self.netName)
if True:
fdow = open(self.fullPath(self.TLS_FILE), "w")
fdow.write('<additional>\n\n')
for y in range(1, 4):
for x in range(1, 6):
eedge = "%s/%s_to_%s/%s.-100" % (x - 1, y, x, y)
wedge = "%s/%s_to_%s/%s.-100" % (x + 1, y, x, y)
nedge = "%s/%s_to_%s/%s.-100" % (x, y + 1, x, y)
sedge = "%s/%s_to_%s/%s.-100" % (x, y - 1, x, y)
fdow.write(
' <tlLogic id="%s/%s" type="actuated" programID="adapted" offset="0">\n' % (x, y))
fdow.write(
' <phase duration="31" state="rrrrrGGgrrrrrGGgGrGr" minDur="10" maxDur="50" type="target;decisional" targetLanes="%s_1 %s_2 %s_1 %s_2"/>\n' % (eedge, eedge, wedge, wedge))
fdow.write(
' <phase duration="4" state="rrrrryygrrrrryygrrrr" type="transient"/>\n')
fdow.write(
' <phase duration="6" state="rrrrrrrGrrrrrrrGrrrr" minDur="2" maxDur="20" type="decisional" targetLanes="%s_2 %s_2"/>\n' % (eedge, wedge))
fdow.write(
' <phase duration="4" state="rrrrrrryrrrrrrryrrrr" type="transient"/>\n')
fdow.write(
' <phase duration="4" state="rrrrrrrrrrrrrrrrrrrr" type="transient;commit"/>\n')
fdow.write(
' <phase duration="31" state="rGGgrrrrrGGgrrrrrGrG" minDur="10" maxDur="50" type="target;decisional" targetLanes="%s_1 %s_2 %s_1 %s_2"/>\n' % (sedge, sedge, nedge, nedge))
fdow.write(
' <phase duration="4" state="ryygrrrrryygrrrrrrrr" type="transient"/>\n')
fdow.write(
' <phase duration="6" state="rrrGrrrrrrrGrrrrrrrr" minDur="2" maxDur="20" type="decisional" targetLanes="%s_2 %s_2"/>\n' % (sedge, nedge))
fdow.write(
' <phase duration="4" state="rrryrrrrrrryrrrrrrrr" type="transient"/>\n')
fdow.write(
' <phase duration="4" state="rrrrrrrrrrrrrrrrrrrr" type="transient;commit"/>\n')
fdow.write(' </tlLogic>\n\n')
fdow.write('</additional>\n')
# demand
if withDefaultDemand:
self.demand = demandGenerator.Demand()
for f in flowsRiLSA1:
for oe, rel in enumerate(f[1]):
flow = int(rel[1] * .75)
prob = rel[2] / 100.
iprob = 1. - prob
pkwEprob = iprob * self.params["equipment-rate"]
pkwNprob = iprob - pkwEprob
lkwEprob = prob * self.params["equipment-rate"]
lkwNprob = prob - lkwEprob
for ie in range(2, 5): # over input
via = []
if f[0] == "nmp":
iedge = "%s/4_to_%s/3" % (ie, ie)
for ve in range(5, 0, -1):
via.append("%s/%s_to_%s/%s" %
(ie, ve, ie, ve - 1))
if f[0] == "smp":
iedge = "%s/0_to_%s/1" % (ie, ie)
for ve in range(0, 5, 1):
via.append("%s/%s_to_%s/%s" %
(ie, ve, ie, ve + 1))
if f[0] == "wmp":
if ie > 2:
continue
iedge = "0/%s_to_1/%s" % (ie, ie)
for ve in range(0, 5, 1):
via.append("%s/%s_to_%s/%s" %
(ve, ie, ve + 1, ie))
if f[0] == "emp":
if ie > 2:
continue
iedge = "6/%s_to_5/%s" % (ie, ie)
for ve in range(5, 0, -1):
via.append("%s/%s_to_%s/%s" %
(ve, ie, ve - 1, ie))
if oe == 0:
# if ie<2 or ie>2: continue # discard vehicles not
# passing the center
if rel[0] == "mn":
oedge = "%s/3_to_%s/4.-100" % (ie, ie)
if rel[0] == "ms":
oedge = "%s/1_to_%s/0.-100" % (ie, ie)
if rel[0] == "mw":
oedge = "1/%s_to_0/%s.-100" % (ie, ie)
if rel[0] == "me":
oedge = "5/%s_to_6/%s.-100" % (ie, ie)
self.demand.addStream(demandGenerator.Stream(iedge + "__" + oedge, 0, 3600, flow, iedge, oedge,
{"passenger": pkwEprob, "COLOMBO_undetectable_passenger": pkwNprob, "hdv": lkwEprob, "COLOMBO_undetectable_hdv": lkwNprob}))
continue
for oee in range(1, 4):
if rel[0] == "mn":
oedge = "%s/3_to_%s/4.-100" % (oee, oee)
if rel[0] == "ms":
oedge = "%s/1_to_%s/0.-100" % (oee, oee)
if rel[0] == "mw":
oedge = "1/%s_to_0/%s.-100" % (oee, oee)
if rel[0] == "me":
oedge = "5/%s_to_6/%s.-100" % (oee, oee)
# if (ie<2 or ie>2) and (oee<2 or oee>2): continue
# # discard vehicles not passing the center
self.demand.addStream(demandGenerator.Stream(iedge + "__" + oedge, 0, 3600, int(flow / 3.), iedge, oedge,
{"passenger": pkwEprob, "COLOMBO_undetectable_passenger": pkwNprob, "hdv": lkwEprob, "COLOMBO_undetectable_hdv": lkwNprob}))
if fileNeedsRebuild(self.demandName, "duarouter"):
self.demand.build(0, 86400, self.netName, self.demandName)
shutil.copy(
self.fullPath(self.NET_FILE), self.sandboxPath(self.NET_FILE))
shutil.copy(
self.fullPath(self.TLS_FILE), self.sandboxPath(self.TLS_FILE))
print("Huah")
| 702nADOS/sumo | tools/sumolib/scenario/scenarios/basic_rilsacorridor3.py | Python | gpl-3.0 | 9,899 |
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 MIT Probabilistic Computing Project
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import OrderedDict
from math import log
from scipy.special import gammaln
from cgpm.primitives.distribution import DistributionGpm
from cgpm.utils import general as gu
class Crp(DistributionGpm):
"""Crp distribution over open set categoricals represented as integers.
X[n] ~ Crp(\alpha | X[1],...,X[n-1])
"""
def __init__(
self, outputs, inputs,
hypers=None, params=None, distargs=None, rng=None):
DistributionGpm.__init__(
self, outputs, inputs, hypers, params, distargs, rng)
# Distargs.
self.N = 0
self.data = OrderedDict()
self.counts = OrderedDict()
# Hyperparameters.
if hypers is None: hypers = {}
self.alpha = hypers.get('alpha', 1.)
def incorporate(self, rowid, observation, inputs=None):
DistributionGpm.incorporate(self, rowid, observation, inputs)
x = int(observation[self.outputs[0]])
self.N += 1
if x not in self.counts:
self.counts[x] = 0
self.counts[x] += 1
self.data[rowid] = x
def unincorporate(self, rowid):
x = self.data.pop(rowid)
self.N -= 1
self.counts[x] -= 1
if self.counts[x] == 0:
del self.counts[x]
def logpdf(self, rowid, targets, constraints=None, inputs=None):
# Do not call DistributionGpm.logpdf since crp allows observed rowid.
assert not inputs
assert not constraints
assert targets.keys() == self.outputs
x = int(targets[self.outputs[0]])
if rowid in self.data:
return 0 if self.data[rowid] == x else -float('inf')
return Crp.calc_predictive_logp(x, self.N, self.counts, self.alpha)
@gu.simulate_many
def simulate(self, rowid, targets, constraints=None, inputs=None, N=None):
DistributionGpm.simulate(self, rowid, targets, constraints, inputs, N)
if rowid in self.data:
x = self.data[rowid]
else:
K = sorted(self.counts) + [max(self.counts) + 1] if self.counts\
else [0]
logps = [self.logpdf(rowid, {targets[0]: x}, None) for x in K]
x = gu.log_pflip(logps, array=K, rng=self.rng)
return {self.outputs[0]: x}
def logpdf_score(self):
return Crp.calc_logpdf_marginal(self.N, self.counts, self.alpha)
##################
# NON-GPM METHOD #
##################
def transition_params(self):
return
def set_hypers(self, hypers):
assert hypers['alpha'] > 0
self.alpha = hypers['alpha']
def get_hypers(self):
return {'alpha': self.alpha}
def get_params(self):
return {}
def get_suffstats(self):
return {'N': self.N, 'counts': self.counts.items()}
def get_distargs(self):
return {}
# Some Gibbs utils.
def gibbs_logps(self, rowid, m=1):
"""Compute the CRP probabilities for a Gibbs transition of rowid,
with table counts Nk, table assignments Z, and m auxiliary tables."""
assert rowid in self.data
assert 0 < m
singleton = self.singleton(rowid)
p_aux = self.alpha / float(m)
p_rowid = p_aux if singleton else self.counts[self.data[rowid]]-1
tables = self.gibbs_tables(rowid, m=m)
def p_table(t):
if t == self.data[rowid]: return p_rowid # rowid table.
if t not in self.counts: return p_aux # auxiliary table.
return self.counts[t] # regular table.
return [log(p_table(t)) for t in tables]
def gibbs_tables(self, rowid, m=1):
"""Retrieve a list of possible tables for rowid.
If rowid is an existing customer, then the standard Gibbs proposal
tables are returned (i.e. with the rowid unincorporated). If
rowid was a singleton table, then the table is re-used as a proposal
and m-1 additional auxiliary tables are proposed, else m auxiliary
tables are returned.
If rowid is a new customer, then the returned tables are from the
predictive distribution, (using m auxiliary tables always).
"""
assert 0 < m
K = sorted(self.counts)
singleton = self.singleton(rowid)
m_aux = m - 1 if singleton else m
t_aux = [max(self.counts) + 1 + m for m in range(m_aux)]
return K + t_aux
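    # Worked example (assumed values): with counts={0: 2, 5: 1}, a rowid alone
    # at table 5 (a singleton) and m=2 reuses its table and adds one auxiliary:
    # K=[0, 5], m_aux=1, t_aux=[6], so the proposal set is [0, 5, 6].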
def singleton(self, rowid):
return self.counts[self.data[rowid]] == 1 if rowid in self.data else 0
@staticmethod
def construct_hyper_grids(X, n_grid=30):
grids = dict()
grids['alpha'] = gu.log_linspace(1./len(X), len(X), n_grid)
return grids
@staticmethod
def name():
return 'crp'
@staticmethod
def is_collapsed():
return True
@staticmethod
def is_continuous():
return False
@staticmethod
def is_conditional():
return False
@staticmethod
def is_numeric():
return False
##################
# HELPER METHODS #
##################
@staticmethod
def calc_predictive_logp(x, N, counts, alpha):
numerator = counts.get(x, alpha)
denominator = N + alpha
return log(numerator) - log(denominator)
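    # Example with assumed numbers: N=3, counts={0: 2, 1: 1}, alpha=1 gives
    # calc_predictive_logp(0, ...) = log(2) - log(4), i.e. P(x=0) = 2/4, while
    # any brand-new category gets alpha/(N+alpha) = 1/4.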
@staticmethod
def calc_logpdf_marginal(N, counts, alpha):
# http://gershmanlab.webfactional.com/pubs/GershmanBlei12.pdf#page=4 (eq 8)
return len(counts) * log(alpha) + sum(gammaln(counts.values())) \
+ gammaln(alpha) - gammaln(N + alpha)
| probcomp/cgpm | src/primitives/crp.py | Python | apache-2.0 | 6,189 |
# -*- coding: utf-8 -*-
from invoke import task
DEFAULT_OPTS = '--backtrack=50 --deep --verbose --verbose-conflicts'
@task
def emerge(ctx, default_opts=DEFAULT_OPTS, opts='', target='world', tag=''):
ctx.run('ionice -c3 {} /usr/bin/emerge {} {} {}'.format(tag, default_opts, opts, target))
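# Hypothetical command-line usage, assuming this task is registered in an
# invoke collection:
#
#     invoke emerge --opts="--pretend" --target=system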
| lmiphay/gentoo-oam | oam/tasks/merge.py | Python | gpl-2.0 | 297 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class TaskResourceRequest(object):
"""
.. note:: Evolving
    A task resource request. This is used in conjunction with the
:class:`pyspark.resource.ResourceProfile` to programmatically specify the resources
needed for an RDD that will be applied at the stage level. The amount is specified
    as a Double to allow for saying you want more than 1 task per resource. Valid values
are less than or equal to 0.5 or whole numbers.
Use :class:`pyspark.resource.TaskResourceRequests` class as a convenience API.
:param resourceName: Name of the resource
:param amount: Amount requesting as a Double to support fractional resource requests.
Valid values are less than or equal to 0.5 or whole numbers.
.. versionadded:: 3.1.0
"""
def __init__(self, resourceName, amount):
self._name = resourceName
self._amount = float(amount)
@property
def resourceName(self):
return self._name
@property
def amount(self):
return self._amount
class TaskResourceRequests(object):
"""
.. note:: Evolving
    A set of task resource requests. This is used in conjunction with the
:class:`pyspark.resource.ResourceProfileBuilder` to programmatically specify the resources
needed for an RDD that will be applied at the stage level.
.. versionadded:: 3.1.0
"""
_CPUS = "cpus"
def __init__(self, _jvm=None, _requests=None):
from pyspark import SparkContext
_jvm = _jvm or SparkContext._jvm
if _jvm is not None:
self._java_task_resource_requests = \
SparkContext._jvm.org.apache.spark.resource.TaskResourceRequests()
if _requests is not None:
for k, v in _requests.items():
if k == self._CPUS:
self._java_task_resource_requests.cpus(int(v.amount))
else:
self._java_task_resource_requests.resource(v.resourceName, v.amount)
else:
self._java_task_resource_requests = None
self._task_resources = {}
def cpus(self, amount):
if self._java_task_resource_requests is not None:
self._java_task_resource_requests.cpus(amount)
else:
self._task_resources[self._CPUS] = TaskResourceRequest(self._CPUS, amount)
return self
def resource(self, resourceName, amount):
if self._java_task_resource_requests is not None:
self._java_task_resource_requests.resource(resourceName, float(amount))
else:
self._task_resources[resourceName] = TaskResourceRequest(resourceName, amount)
return self
@property
def requests(self):
if self._java_task_resource_requests is not None:
result = {}
taskRes = self._java_task_resource_requests.requestsJMap()
for k, v in taskRes.items():
result[k] = TaskResourceRequest(v.resourceName(), v.amount())
return result
else:
return self._task_resources
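# A usage sketch (hypothetical; with no SparkContext the pure-Python branch is
# used, but the builder chain is the same either way):
#
#     reqs = TaskResourceRequests().cpus(2).resource("gpu", 0.5)
#     reqs.requests   # -> {'cpus': TaskResourceRequest, 'gpu': TaskResourceRequest}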
| zuotingbing/spark | python/pyspark/resource/taskrequests.py | Python | apache-2.0 | 3,865 |
# coding=utf-8
from __future__ import print_function
__author__ = 'Mario Romera Fernández'
__license__ = 'GNU General Public License v2.0'
def to_unicode(obj, encoding="utf-8"):
if isinstance(obj, basestring):
if not isinstance(obj, unicode):
obj = unicode(obj, encoding)
return obj
def comment(sentence):
"""
Print header and footer of code delimiter in the form of:
################################################################################
################################### SENTENCE ###################################
################################# END SENTENCE #################################
################################################################################
:param sentence: basestring
"""
_sentence = to_unicode(sentence)
_sentence = u" {} ".format(_sentence)
print(u"{}".format(u"#" * 80))
print(u"{:#^80}".format(_sentence.upper()))
_sentenceEnd = u" END" + _sentence
print(u"{:#^80}".format(_sentenceEnd.upper()))
print(u"{}".format(u"#" * 80))
def remove_trailing_zeros(l=None):
"""
Removes trailing zeros from a list
:param l: list
"""
if not l:
l = []
for i in reversed(l):
if i == 0:
l.pop(-1)
else:
return l
return l
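# For example: remove_trailing_zeros([1, 2, 0, 3, 0, 0]) returns [1, 2, 0, 3].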
def pf(var, struct=1, key=None, cont=None):
"""
Prints type, length (if available) and value of a variable recursively.
(ANSI colors, bad output in cmd.exe, designed for PyCharm console)
:param var: var to print info from
    :param struct: 0 to avoid printing values
:param key: key of the dictionary
:param cont: stores the length of nested vars in a list to print proper indentation
"""
# print("{}".format(cont), end="")
if not cont:
cont = []
try:
l = len(var)
except TypeError:
l = None
# Prints "| " if nested, else prints " "
if len(cont) > 1:
remove_trailing_zeros(cont)
for c in cont[:-1]:
if c > 0:
print(u" \033[96m|\033[00m ", end="")
else:
print(u" ", end="")
# Prints " |->"
if len(cont) >= 1:
print(u" \033[96m|->\033[00m", end="")
    # Subtracts 1 from the last element of cont list
if len(cont) > 0 and cont[-1] > 0:
cont[-1] -= 1
# Prints the var type
print(u"\033[91m{}\033[00m".format(str(type(var)).rsplit()[1].replace("'", "").replace(">", "").upper()), end="")
# Prints the var length
if l is not None:
print(u":\033[93m{}\033[00m ".format(l), end="")
else:
print(end=" ")
# Prints the var value
if struct == 1:
if key is None:
print(var)
else:
print(u"'\033[95m{}\033[00m':{}".format(key, var))
else:
print(end="\n")
# If var is iterable call pf function for each value
if hasattr(var, '__iter__'):
cont.append(l)
if isinstance(var, dict):
for k, v in var.items():
pf(var=v, struct=struct, key=k, cont=cont)
else:
for i in var:
pf(var=i, struct=struct, cont=cont)
if __name__ == '__main__':
import datetime
integer = 1
decimal = 3.14159
string = "áñ@"
date = datetime.datetime.now()
aList = [integer, decimal, u"añ@2", string]
aList2 = aList[:]
aList2[1] = aList
aSet = set(aList)
aDict = {"key1": aList2, "key2": decimal, "date": date}
aTuple = (aList2, aDict, aList, aSet, aList2)
print(end="\n")
pf(aTuple)
print(end="\n")
pf(aTuple, 0)
comment(u"áñ@")
print(end="\n")
comment("áñ@")
print(end="\n")
comment(u"sentence")
| Taabu/batcave | utilidades.py | Python | gpl-2.0 | 3,746 |
from menpo.landmark.labels import ibug_face_66
def process(lms):
return ibug_face_66(lms)[1]
| menpo/menpobench | menpobench/predefined/landmark_process/face_ibug_68_to_face_ibug_66.py | Python | bsd-3-clause | 98 |
#!/usr/bin/python
#
# Copyright: Ansible Team
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: aireos_config
version_added: "2.4"
author: "James Mighion (@jmighion)"
short_description: Manage Cisco WLC configurations
description:
- AireOS does not use a block indent file syntax, so there are no sections or parents.
This module provides an implementation for working with AireOS configurations in
a deterministic way.
extends_documentation_fragment: aireos
options:
lines:
description:
- The ordered set of commands that should be configured.
The commands must be the exact same commands as found
in the device run-config. Be sure to note the configuration
command syntax as some commands are automatically modified by the
device config parser.
aliases: ['commands']
src:
description:
- Specifies the source path to the file that contains the configuration
or configuration template to load. The path to the source file can
either be the full path on the Ansible control host or a relative
path from the playbook or role root directory. This argument is mutually
exclusive with I(lines).
before:
description:
- The ordered set of commands to push on to the command stack if
a change needs to be made. This allows the playbook designer
the opportunity to perform configuration commands prior to pushing
any changes without affecting how the set of commands are matched
against the system.
after:
description:
- The ordered set of commands to append to the end of the command
stack if a change needs to be made. Just like with I(before) this
allows the playbook designer to append a set of commands to be
executed after the command set.
match:
description:
- Instructs the module on the way to perform the matching of
the set of commands against the current device config. If
match is set to I(line), commands are matched line by line.
If match is set to I(none), the module will not attempt to
compare the source configuration with the running
configuration on the remote device.
default: line
choices: ['line', 'none']
backup:
description:
- This argument will cause the module to create a full backup of
the current C(running-config) from the remote device before any
changes are made. The backup file is written to the C(backup)
folder in the playbook root directory. If the directory does not
exist, it is created.
type: bool
default: 'no'
running_config:
description:
- The module, by default, will connect to the remote device and
retrieve the current running-config to use as a base for comparing
against the contents of source. There are times when it is not
desirable to have the task get the current running-config for
every task in a playbook. The I(running_config) argument allows the
implementer to pass in the configuration to use as the base
config for comparison.
aliases: ['config']
save:
description:
- The C(save) argument instructs the module to save the running-
config to the startup-config at the conclusion of the module
running. If check mode is specified, this argument is ignored.
type: bool
default: 'no'
diff_against:
description:
- When using the C(ansible-playbook --diff) command line argument
the module can generate diffs against different sources.
- When this option is configured as I(intended), the module will
return the diff of the running-config against the configuration
provided in the C(intended_config) argument.
- When this option is configured as I(running), the module will
return the before and after diff of the running-config with respect
to any changes made to the device configuration.
choices: ['intended', 'running']
diff_ignore_lines:
description:
- Use this argument to specify one or more lines that should be
ignored during the diff. This is used for lines in the configuration
that are automatically updated by the system. This argument takes
a list of regular expressions or exact line matches.
intended_config:
description:
- The C(intended_config) provides the master configuration that
the node should conform to and is used to check the final
running-config against. This argument will not modify any settings
        on the remote device and is strictly used to check the current
        device's configuration for compliance. When specifying this
argument, the task should also modify the C(diff_against) value and
set it to I(intended).
"""
EXAMPLES = """
- name: configure configuration
aireos_config:
lines: sysname testDevice
- name: diff the running-config against a provided config
aireos_config:
diff_against: intended
intended: "{{ lookup('file', 'master.cfg') }}"
- name: load new acl into device
aireos_config:
lines:
- acl create testACL
- acl rule protocol testACL 1 any
- acl rule direction testACL 3 in
before: acl delete testACL
"""
RETURN = """
commands:
description: The set of commands that will be pushed to the remote device
returned: always
type: list
sample: ['hostname foo', 'vlan 1', 'name default']
updates:
description: The set of commands that will be pushed to the remote device
returned: always
type: list
sample: ['hostname foo', 'vlan 1', 'name default']
backup_path:
description: The full path to the backup file
returned: when backup is yes
type: string
sample: /playbooks/ansible/backup/aireos_config.2016-07-16@22:28:34
"""
from ansible.module_utils.network.aireos.aireos import run_commands, get_config, load_config
from ansible.module_utils.network.aireos.aireos import aireos_argument_spec
from ansible.module_utils.network.aireos.aireos import check_args as aireos_check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.config import NetworkConfig, dumps
def get_running_config(module, config=None):
contents = module.params['running_config']
if not contents:
if config:
contents = config
else:
contents = get_config(module)
return NetworkConfig(indent=1, contents=contents)
def get_candidate(module):
candidate = NetworkConfig(indent=1)
if module.params['src']:
candidate.load(module.params['src'])
elif module.params['lines']:
candidate.add(module.params['lines'])
return candidate
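# Rough sketch of the line matching done in main() below (illustrative values,
# not from a real device): with running config 'sysname oldDevice' and
# candidate line 'sysname testDevice', candidate.difference(config,
# match='line') keeps only lines not already present verbatim, so just
# 'sysname testDevice' is pushed.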
def main():
""" main entry point for module execution
"""
argument_spec = dict(
src=dict(type='path'),
lines=dict(aliases=['commands'], type='list'),
before=dict(type='list'),
after=dict(type='list'),
match=dict(default='line', choices=['line', 'none']),
running_config=dict(aliases=['config']),
intended_config=dict(),
backup=dict(type='bool', default=False),
save=dict(type='bool', default=False),
diff_against=dict(choices=['running', 'intended']),
diff_ignore_lines=dict(type='list')
)
argument_spec.update(aireos_argument_spec)
mutually_exclusive = [('lines', 'src')]
required_if = [('diff_against', 'intended', ['intended_config'])]
module = AnsibleModule(argument_spec=argument_spec,
mutually_exclusive=mutually_exclusive,
required_if=required_if,
supports_check_mode=True)
warnings = list()
aireos_check_args(module, warnings)
result = {'changed': False, 'warnings': warnings}
config = None
if module.params['backup'] or (module._diff and module.params['diff_against'] == 'running'):
contents = get_config(module)
config = NetworkConfig(indent=1, contents=contents)
if module.params['backup']:
result['__backup__'] = contents
if any((module.params['src'], module.params['lines'])):
match = module.params['match']
candidate = get_candidate(module)
if match != 'none':
config = get_running_config(module, config)
configobjs = candidate.difference(config, match=match)
else:
configobjs = candidate.items
if configobjs:
commands = dumps(configobjs, 'commands').split('\n')
if module.params['before']:
commands[:0] = module.params['before']
if module.params['after']:
commands.extend(module.params['after'])
result['commands'] = commands
result['updates'] = commands
if not module.check_mode:
load_config(module, commands)
result['changed'] = True
diff_ignore_lines = module.params['diff_ignore_lines']
if module.params['save']:
result['changed'] = True
if not module.check_mode:
command = {"command": "save config", "prompt": "Are you sure you want to save", "answer": "y"}
run_commands(module, command)
else:
module.warn('Skipping command `save config` due to check_mode. Configuration not copied to non-volatile storage')
if module._diff:
output = run_commands(module, 'show run-config commands')
contents = output[0]
# recreate the object in order to process diff_ignore_lines
running_config = NetworkConfig(indent=1, contents=contents, ignore_lines=diff_ignore_lines)
if module.params['diff_against'] == 'running':
if module.check_mode:
module.warn("unable to perform diff against running-config due to check mode")
contents = None
else:
contents = config.config_text
elif module.params['diff_against'] == 'intended':
contents = module.params['intended_config']
if contents is not None:
base_config = NetworkConfig(indent=1, contents=contents, ignore_lines=diff_ignore_lines)
if running_config.sha1 != base_config.sha1:
result.update({
'changed': True,
'diff': {'before': str(base_config), 'after': str(running_config)}
})
module.exit_json(**result)
if __name__ == '__main__':
main()
| Russell-IO/ansible | lib/ansible/modules/network/aireos/aireos_config.py | Python | gpl-3.0 | 10,887 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0019_auto_20151130_0122'),
]
operations = [
migrations.AlterModelOptions(
name='discipline',
options={'ordering': ['name']},
),
migrations.AddField(
model_name='disciplinestats',
name='rank',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='subjectstats',
name='rank',
field=models.IntegerField(default=0),
),
]
| rskwan/ucb-grade-dists | ucbgradedists/core/migrations/0020_auto_20151201_0051.py | Python | mit | 681 |
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
callback: debug
type: stdout
short_description: formatted stdout/stderr display
description:
- Use this callback to sort though extensive debug output
version_added: "2.4"
extends_documentation_fragment:
- default_callback
requirements:
- set as stdout in configuration
'''
from ansible.plugins.callback.default import CallbackModule as CallbackModule_default
class CallbackModule(CallbackModule_default): # pylint: disable=too-few-public-methods,no-init
'''
Override for the default callback module.
Render std err/out outside of the rest of the result which it prints with
indentation.
'''
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout'
CALLBACK_NAME = 'debug'
def _dump_results(self, result, indent=None, sort_keys=True, keep_invocation=False):
'''Return the text to output for a result.'''
        # Enable JSON indentation
result['_ansible_verbose_always'] = True
save = {}
for key in ['stdout', 'stdout_lines', 'stderr', 'stderr_lines', 'msg', 'module_stdout', 'module_stderr']:
if key in result:
save[key] = result.pop(key)
output = CallbackModule_default._dump_results(self, result)
for key in ['stdout', 'stderr', 'msg', 'module_stdout', 'module_stderr']:
if key in save and save[key]:
output += '\n\n%s:\n\n%s\n' % (key.upper(), save[key])
for key, value in save.items():
result[key] = value
return output
| alexlo03/ansible | lib/ansible/plugins/callback/debug.py | Python | gpl-3.0 | 1,772 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('blog', '0002_auto_20141029_1945'),
]
operations = [
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, verbose_name='ID', serialize=False)),
('name', models.CharField(max_length=32)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='article',
name='tags',
field=models.ManyToManyField(related_name='articles', to='blog.Tag', blank=True),
preserve_default=True,
),
]
| bobisjan/django-shanghai | tests/project/blog/migrations/0003_auto_20141104_2232.py | Python | mit | 823 |
from django.utils.translation import ugettext_lazy as _
# Reviewer Tools
REVIEWER_VIEWING_INTERVAL = 8 # How often we ping for "who's watching?"
REVIEWER_REVIEW_LOCK_LIMIT = 3 # How many pages can a reviewer "watch"
# Types of Canned Responses for reviewer tools.
CANNED_RESPONSE_ADDON = 1
CANNED_RESPONSE_THEME = 2
CANNED_RESPONSE_PERSONA = 3
CANNED_RESPONSE_CHOICES = {
CANNED_RESPONSE_ADDON: _('Add-on'),
CANNED_RESPONSE_THEME: _('Static Theme'),
CANNED_RESPONSE_PERSONA: _('Persona'),
}
# Risk tiers for post-review weight.
POST_REVIEW_WEIGHT_HIGHEST_RISK = 150
POST_REVIEW_WEIGHT_HIGH_RISK = 100
POST_REVIEW_WEIGHT_MEDIUM_RISK = 20
# Reviewer Incentive Scores.
# Note: Don't change these since they're used as keys in the database.
REVIEWED_MANUAL = 0
REVIEWED_ADDON_FULL = 10
_REVIEWED_ADDON_PRELIM = 11 # Deprecated for new reviews - no more prelim.
REVIEWED_ADDON_UPDATE = 12
REVIEWED_DICT_FULL = 20
_REVIEWED_DICT_PRELIM = 21 # Deprecated for new reviews - no more prelim.
REVIEWED_DICT_UPDATE = 22
REVIEWED_LP_FULL = 30
_REVIEWED_LP_PRELIM = 31 # Deprecated for new reviews - no more prelim.
REVIEWED_LP_UPDATE = 32
REVIEWED_PERSONA = 40
REVIEWED_STATICTHEME = 41
# TODO: Leaving room for persona points based on queue.
REVIEWED_SEARCH_FULL = 50
_REVIEWED_SEARCH_PRELIM = 51 # Deprecated for new reviews - no more prelim.
REVIEWED_SEARCH_UPDATE = 52
REVIEWED_XUL_THEME_FULL = 60
_REVIEWED_XUL_THEME_PRELIM = 61 # Deprecated for new reviews - no more prelim.
REVIEWED_XUL_THEME_UPDATE = 62
REVIEWED_ADDON_REVIEW = 80
REVIEWED_ADDON_REVIEW_POORLY = 81
REVIEWED_CONTENT_REVIEW = 101
REVIEWED_EXTENSION_HIGHEST_RISK = 102
REVIEWED_EXTENSION_HIGH_RISK = 103
REVIEWED_EXTENSION_MEDIUM_RISK = 104
REVIEWED_EXTENSION_LOW_RISK = 105
# We need to keep the deprecated choices for existing points in the database.
REVIEWED_CHOICES = {
REVIEWED_MANUAL: _('Manual Reviewer Points'),
REVIEWED_ADDON_FULL: _('New Add-on Review'),
_REVIEWED_ADDON_PRELIM: _('Preliminary Add-on Review'),
REVIEWED_ADDON_UPDATE: _('Updated Add-on Review'),
REVIEWED_DICT_FULL: _('New Dictionary Review'),
_REVIEWED_DICT_PRELIM: _('Preliminary Dictionary Review'),
REVIEWED_DICT_UPDATE: _('Updated Dictionary Review'),
REVIEWED_LP_FULL: _('New Language Pack Review'),
_REVIEWED_LP_PRELIM: _('Preliminary Language Pack Review'),
REVIEWED_LP_UPDATE: _('Updated Language Pack Review'),
REVIEWED_PERSONA: _('Theme Review'),
REVIEWED_STATICTHEME: _('Theme (Static) Review'),
REVIEWED_SEARCH_FULL: _('New Search Provider Review'),
_REVIEWED_SEARCH_PRELIM: _('Preliminary Search Provider Review'),
REVIEWED_SEARCH_UPDATE: _('Updated Search Provider Review'),
REVIEWED_XUL_THEME_FULL: _('New Complete Theme Review'),
_REVIEWED_XUL_THEME_PRELIM: _('Preliminary Complete Theme Review'),
REVIEWED_XUL_THEME_UPDATE: _('Updated Complete Theme Review'),
REVIEWED_ADDON_REVIEW: _('Moderated Add-on Review'),
REVIEWED_ADDON_REVIEW_POORLY: _('Add-on Review Moderation Reverted'),
REVIEWED_CONTENT_REVIEW: _('Add-on Content Review'),
REVIEWED_EXTENSION_HIGHEST_RISK:
_('Post-Approval Add-on Review (Highest Risk)'),
REVIEWED_EXTENSION_HIGH_RISK:
_('Post-Approval Add-on Review (High Risk)'),
REVIEWED_EXTENSION_MEDIUM_RISK:
_('Post-Approval Add-on Review (Medium Risk)'),
REVIEWED_EXTENSION_LOW_RISK:
_('Post-Approval Add-on Review (Low Risk)'),
}
REVIEWED_OVERDUE_BONUS = 2
REVIEWED_OVERDUE_LIMIT = 7
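# (Presumed semantics, not stated in this file: reviews past their due date
# earn REVIEWED_OVERDUE_BONUS extra points per overdue day, capped at
# REVIEWED_OVERDUE_LIMIT days.)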
REVIEWED_SCORES = {
REVIEWED_MANUAL: 0,
REVIEWED_ADDON_FULL: 120,
REVIEWED_ADDON_UPDATE: 80,
REVIEWED_DICT_FULL: 60,
REVIEWED_DICT_UPDATE: 60,
REVIEWED_LP_FULL: 60,
REVIEWED_LP_UPDATE: 60,
REVIEWED_PERSONA: 5,
REVIEWED_STATICTHEME: 5,
REVIEWED_SEARCH_FULL: 30,
REVIEWED_SEARCH_UPDATE: 30,
REVIEWED_XUL_THEME_FULL: 80,
REVIEWED_XUL_THEME_UPDATE: 80,
REVIEWED_ADDON_REVIEW: 1,
REVIEWED_ADDON_REVIEW_POORLY: -1, # -REVIEWED_ADDON_REVIEW,
REVIEWED_CONTENT_REVIEW: 10,
REVIEWED_EXTENSION_HIGHEST_RISK: 140,
REVIEWED_EXTENSION_HIGH_RISK: 120,
REVIEWED_EXTENSION_MEDIUM_RISK: 90,
REVIEWED_EXTENSION_LOW_RISK: 0,
}
REVIEWED_AMO = (
REVIEWED_ADDON_FULL,
REVIEWED_ADDON_UPDATE,
REVIEWED_DICT_FULL,
REVIEWED_DICT_UPDATE,
REVIEWED_LP_FULL,
REVIEWED_LP_UPDATE,
REVIEWED_SEARCH_FULL,
REVIEWED_SEARCH_UPDATE,
REVIEWED_XUL_THEME_FULL,
REVIEWED_XUL_THEME_UPDATE,
REVIEWED_STATICTHEME,
REVIEWED_ADDON_REVIEW,
REVIEWED_CONTENT_REVIEW,
REVIEWED_EXTENSION_HIGHEST_RISK,
REVIEWED_EXTENSION_HIGH_RISK,
REVIEWED_EXTENSION_MEDIUM_RISK,
REVIEWED_EXTENSION_LOW_RISK,
)
REVIEWED_LEVELS = [
{'name': _('Level 1'), 'points': 2160},
{'name': _('Level 2'), 'points': 4320},
{'name': _('Level 3'), 'points': 8700},
{'name': _('Level 4'), 'points': 21000},
{'name': _('Level 5'), 'points': 45000},
{'name': _('Level 6'), 'points': 96000},
{'name': _('Level 7'), 'points': 300000},
{'name': _('Level 8'), 'points': 1200000},
{'name': _('Level 9'), 'points': 3000000},
]
# Amount of hours to hide add-on reviews from users with permission
# Addons:DelayedReviews
REVIEW_LIMITED_DELAY_HOURS = 20
# Review queue pagination
REVIEWS_PER_PAGE = 200
REVIEWS_PER_PAGE_MAX = 400
# Theme review queue constants.
THEME_INITIAL_LOCKS = 5 # Initial number of themes to check out.
THEME_LOCK_EXPIRY = 30 # Minutes.
ACTION_MOREINFO = 0
ACTION_FLAG = 1
ACTION_DUPLICATE = 2
ACTION_REJECT = 3
ACTION_APPROVE = 4
REVIEW_ACTIONS = {
ACTION_MOREINFO: _('Request More Info'),
ACTION_FLAG: _('Flag'),
ACTION_DUPLICATE: _('Duplicate'),
ACTION_REJECT: _('Reject'),
ACTION_APPROVE: _('Approve')
}
THEME_REJECT_REASONS = {
# 0: _('Other rejection reason'),
1: _('Sexual or pornographic content'),
2: _('Inappropriate or offensive content'),
3: _('Violence, war, or weaponry images'),
4: _('Nazi or other hate content'),
5: _('Defamatory content'),
6: _('Online gambling'),
7: _('Spam content'),
8: _('Low-quality, stretched, or blank image'),
9: _('Header image alignment problem'),
}
WOULD_NOT_HAVE_BEEN_AUTO_APPROVED = 0
WOULD_HAVE_BEEN_AUTO_APPROVED = 1
AUTO_APPROVED = 2
NOT_AUTO_APPROVED = 3
AUTO_APPROVAL_VERDICT_CHOICES = (
(WOULD_NOT_HAVE_BEEN_AUTO_APPROVED,
'Would have been auto-approved (dry-run mode was in effect)'),
(WOULD_HAVE_BEEN_AUTO_APPROVED,
'Would *not* have been auto-approved (dry-run mode was in effect)'),
(AUTO_APPROVED, 'Was auto-approved'),
(NOT_AUTO_APPROVED, 'Was *not* auto-approved'),
)
| harry-7/addons-server | src/olympia/constants/reviewers.py | Python | bsd-3-clause | 6,664 |
from django.conf.urls.defaults import patterns, include, url
from django.views.generic.simple import redirect_to
from webdnd.player.views.main import HomeView
from webdnd.player.views.main import AboutView
from webdnd.player.views.main import ContactView
urlpatterns = patterns('webdnd.player.views.main',
url(r'^/?$', redirect_to, {'url': '/home'}),
url(r'^home/?$', HomeView.as_view(), name='main_home'),
url(r'^about/?$', AboutView.as_view(), name='main_about'),
url(r'^contact_us/?$', ContactView.as_view(), name='main_contact_us'),
)
| Saevon/webdnd | player/urls/main.py | Python | mit | 558 |
# coding=utf-8
"""
InaSAFE Disaster risk assessment tool developed by AusAid and World Bank
- **Script for pushing new earthquake impact report**
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'imajimatika@gmail.com'
__version__ = '0.5.0'
__date__ = '21/02/2013'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
import os
import shutil
import sys
from utils import is_event_id
import logging
# The logger is initialized in utils.py by init
LOGGER = logging.getLogger('InaSAFE')
try:
earth_quake_source_path = os.environ['EQ_SOURCE_PATH']
earth_quake_public_path = os.environ['EQ_PUBLIC_PATH']
earth_quake_guide_path = os.environ['EQ_GUIDE_PATH']
except KeyError:
LOGGER.exception('EQ_SOURCE_PATH or EQ_PUBLIC_PATH are not set!')
sys.exit()
def get_list_dir(path_dir, filter_function=None):
"""Return list of file or directory in path_dir
with filter function filter_function.
:param path_dir:
:param filter_function:
"""
list_dir = os.listdir(path_dir)
print 'list_dir', len(list_dir)
if filter_function is None:
return list_dir
retval = []
for my_dir in list_dir:
if filter_function(my_dir):
retval.append(my_dir)
return retval
def get_event_id(report_filename):
"""Custom function to return event id from a filename
Thi is for filename format like:
earthquake_impact_map_20120216181705.pdf
:param report_filename:
"""
return report_filename[-18:-4]
def filter_zip_eq_event(zip_eq_event):
"""Return true if zip_eq_event in the following format:
YYYYBBDDhhmmss.out.zip
for example : 20130226211002.out.zip
:param zip_eq_event
"""
expected_len = len('20130226211002.out.zip')
if len(zip_eq_event) != expected_len:
return False
my_event_id = zip_eq_event[:14]
if is_event_id(my_event_id):
return True
else:
return False
def filter_eq_map(eq_map_path):
"""Return true if eq_map_path in the following format:
earthquake_impact_map_YYYYBBDDhhmmss.pdf
for example : earthquake_impact_map_20120216181705.pdf
:param eq_map_path
"""
expected_len = len('earthquake_impact_map_20120216181705.pdf')
if len(eq_map_path) != expected_len:
return False
print 'eq', eq_map_path
# if not re.match(eq_map_path, my_regex):
# print 're'
# return False
my_event_id = get_event_id(eq_map_path)
print 'my_event_id', my_event_id
if is_event_id(my_event_id):
return True
else:
print 'not event id'
return False
def sort_event(events):
"""Sort list of event id my_event as list ascending.
:param events:
"""
try:
sorted_events = sorted([int(x) for x in events])
return sorted_events
except ValueError as e:
raise e
def get_last_event_id(events):
"""Return last event id of my_events.
:param events:
"""
sorted_events = sort_event(events)[-1]
return sorted_events
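# For example: get_last_event_id(['20130226211002', '20120216181705']) returns
# the int 20130226211002; callers such as update_report() str() it back before
# building file paths.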
def update_report(source_path, public_path, last_event_id):
"""Copy latest report to my_public_path and make a copy with
a latest_earthquake_impact_map.pdf and latest_earthquake_impact_map.png
:param source_path:
:param public_path:
:param last_event_id:
"""
last_event_id = str(last_event_id)
source_dir = os.path.join(source_path, last_event_id)
report_filename = last_event_id + '-id'
pdf_file = report_filename + '.pdf'
pdf_path = os.path.join(source_dir, pdf_file)
png_path = pdf_path.replace('.pdf', '.png')
public_pdf_file = 'earthquake_impact_map_' + last_event_id + '.pdf'
public_pdf_path = os.path.join(public_path, public_pdf_file)
latest_pdf_path = os.path.join(public_path,
'latest_earthquake_impact_map.pdf')
latest_png_path = os.path.join(public_path,
'latest_earthquake_impact_map.png')
# copy file
shutil.copy2(png_path, latest_png_path)
print 'copied to ' + latest_png_path
shutil.copy2(pdf_path, latest_pdf_path)
print 'copied to ' + latest_pdf_path
shutil.copy2(pdf_path, public_pdf_path)
print 'copied to ' + public_pdf_path
def main():
"""The implementation
"""
source_path = earth_quake_source_path
public_path = earth_quake_public_path
    # The guide path holds the list of event ids to be pushed to the
    # website. It exists because in some cases we don't want to push
    # every realtime earthquake report, only the important ones.
    # This path usually is in a specific format and is used to get the
    # latest event id.
guide_path = earth_quake_guide_path
guide_files = get_list_dir(guide_path, filter_zip_eq_event)
guide_events = [x[:14] for x in guide_files]
last_guide = get_last_event_id(guide_events)
public_files = get_list_dir(public_path, filter_eq_map)
print ' public_files', public_files
public_events = [get_event_id(x) for x in public_files]
print 'public_events', public_events
last_public = get_last_event_id(public_events)
if last_guide > last_public:
last_event_id = last_guide
        print 'There is a new eq impact map.'
# do_something_here()
update_report(source_path, public_path, last_event_id)
else:
        print 'No new eq impact map, everything is safe.'
if __name__ == '__main__':
main()
| drayanaindra/inasafe | realtime/update_latest_report.py | Python | gpl-3.0 | 5,772 |
""" DIRAC Graphs package provides tools for creation of various plots to provide
graphical representation of the DIRAC Monitoring and Accounting data
The DIRAC Graphs package is derived from the GraphTool plotting package of the
CMS/Phedex Project by ... <to be added>
"""
__RCSID__ = "$Id$"
# Make sure the Agg backend is used despite arbitrary configuration
import matplotlib
matplotlib.use( 'agg' )
import DIRAC
from DIRAC.Core.Utilities.Graphs.Graph import Graph
from DIRAC.Core.Utilities.Graphs.GraphUtilities import evalPrefs
common_prefs = {
'background_color':'white',
'figure_padding':12,
'plot_grid':'1:1',
'plot_padding':0,
'frame':'On',
'font' : 'Lucida Grande',
'font_family' : 'sans-serif',
'dpi':100,
'legend':True,
'legend_position':'bottom',
'legend_max_rows':99,
'legend_max_columns':4,
'square_axis':False,
'scale_data': None,
'scale_ticks': None
}
graph_large_prefs = {
'width':1000,
'height':700,
'text_size':8,
'subtitle_size':10,
'subtitle_padding':5,
'title_size':15,
'title_padding':5,
'text_padding':5,
'figure_padding':15,
'plot_title_size':12,
'legend_width':980,
'legend_height':150,
'legend_padding':20,
'limit_labels':15,
'graph_time_stamp':True
}
graph_normal_prefs = {
'width':800,
'height':600,
'text_size':8,
'subtitle_size':10,
'subtitle_padding':5,
'title_size':15,
'title_padding':10,
'text_padding':5,
'figure_padding':12,
'plot_title_size':12,
'legend_width':780,
'legend_height':120,
'legend_padding':20,
'limit_labels':15,
'graph_time_stamp':True,
'label_text_size' : 14
}
graph_small_prefs = {
'width':450,
'height':330,
'text_size':10,
'subtitle_size':5,
'subtitle_padding':4,
'title_size':10,
'title_padding':6,
'text_padding':3,
'figure_padding':10,
'plot_title_size':8,
'legend_width':430,
'legend_height':50,
'legend_padding':10,
'limit_labels':15,
'graph_time_stamp':True
}
graph_thumbnail_prefs = {
'width':100,
'height':80,
'text_size':6,
'subtitle_size':0,
'subtitle_padding':0,
'title_size':8,
'title_padding':2,
'text_padding':1,
'figure_padding':2,
'plot_title':'NoTitle',
'legend':False,
'plot_axis_grid':False,
'plot_axis':False,
'plot_axis_labels':False,
'graph_time_stamp':False,
'tight_bars':True
}
def graph( data, fileName, *args, **kw ):
  prefs = evalPrefs( *args, **kw )
  graph_size = prefs.get( 'graph_size', "normal" )
  if graph_size == "small":
    defaults = graph_small_prefs
  elif graph_size == "thumbnail":
    defaults = graph_thumbnail_prefs
  elif graph_size == "large":
    defaults = graph_large_prefs
  else:
    # "normal" and any unrecognized graph_size fall back to the normal defaults
    defaults = graph_normal_prefs
graph = Graph()
graph.makeGraph( data, common_prefs, defaults, prefs )
graph.writeGraph( fileName, 'PNG' )
return DIRAC.S_OK( {'plot':fileName} )
def __checkKW( kw ):
if 'watermark' not in kw:
kw[ 'watermark' ] = "%s/DIRAC/Core/Utilities/Graphs/Dwatermark.png" % DIRAC.rootPath
return kw
def barGraph( data, fileName, *args, **kw ):
kw = __checkKW( kw )
graph( data, fileName, plot_type = 'BarGraph', statistics_line = True, *args, **kw )
def lineGraph( data, fileName, *args, **kw ):
kw = __checkKW( kw )
graph( data, fileName, plot_type = 'LineGraph', statistics_line = True, *args, **kw )
def curveGraph( data, fileName, *args, **kw ):
kw = __checkKW( kw )
graph( data, fileName, plot_type = 'CurveGraph', statistics_line = False, *args, **kw )
def cumulativeGraph( data, fileName, *args, **kw ):
kw = __checkKW( kw )
graph( data, fileName, plot_type = 'LineGraph', cumulate_data = True, *args, **kw )
def pieGraph( data, fileName, *args, **kw ):
kw = __checkKW( kw )
prefs = {'xticks':False, 'yticks':False, 'legend_position':'right'}
graph( data, fileName, prefs, plot_type = 'PieGraph', *args, **kw )
def qualityGraph( data, fileName, *args, **kw ):
kw = __checkKW( kw )
prefs = {'plot_axis_grid':False}
graph( data, fileName, prefs, plot_type = 'QualityMapGraph', *args, **kw )
def textGraph( text, fileName, *args, **kw ):
kw = __checkKW( kw )
prefs = {'text_image':text}
graph( {}, fileName, prefs, *args, **kw )
def histogram( data, fileName, bins, *args, **kw ):
try:
from pylab import hist
  except ImportError:
print "No pylab module available"
return
kw = __checkKW( kw )
values, vbins, _patches = hist( data, bins )
histo = dict( zip( vbins, values ) )
span = ( max( data ) - min( data ) ) / float( bins ) * 0.95
graph( histo, fileName, plot_type = 'BarGraph', span = span, statistics_line = True, *args, **kw )
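if __name__ == '__main__':
  # Minimal usage sketch, not part of the DIRAC API. Assumes a writable
  # working directory; the data shapes accepted per plot type are defined by
  # Graph.makeGraph, and 'title' is assumed to be a recognized preference.
  sampleData = { 'Site1' : { 0 : 1.0, 3600 : 2.5, 7200 : 1.5 },
                 'Site2' : { 0 : 0.5, 3600 : 1.0, 7200 : 2.0 } }
  barGraph( sampleData, 'sample_bar.png', title = 'Sample plot' )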
| andresailer/DIRAC | Core/Utilities/Graphs/__init__.py | Python | gpl-3.0 | 4,866 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import redirect
from flask import request
from flask import url_for
from flask.ext.wtf import Form as BaseForm
from sqlalchemy.orm.exc import MultipleResultsFound
from sqlalchemy.orm.exc import NoResultFound
from wtforms import fields
from wtforms import validators
from .confirmable import user_requires_confirmation
from .models import User
from .utilities import get_message
from .utilities import get_redirect
from .utilities import is_safe_redirect_url
_default_form_field_labels = {
'email': 'Email Address',
'password': 'Password',
'password_confirm': 'Password Confirmation',
'remember_me': 'Remember Me',
}
class ValidatorMixin(object):
def __call__(self, form, field):
if self.message and self.message.isupper():
self.message = get_message(self.message)
return super(ValidatorMixin, self).__call__(form, field)
class EqualTo(ValidatorMixin, validators.EqualTo):
pass
class Required(ValidatorMixin, validators.Required):
pass
class Email(ValidatorMixin, validators.Email):
pass
class Length(ValidatorMixin, validators.Length):
pass
email_required = Required(message='EMAIL_ADDRESS_NOT_PROVIDED')
email_validator = Email(message='EMAIL_ADDRESS_INVALID')
password_required = Required(message='PASSWORD_NOT_PROVIDED')
password_length = Length(min=8, max=128, message='PASSWORD_LENGTH_INVALID')
def valid_user_email(form, field):
form.user = User.query.filter_by(email=field.data).first()
if form.user is None:
raise validators.ValidationError(get_message('USER_DOES_NOT_EXIST'))
def get_form_field_label(form_field_label):
""" Modify here if you want i18n. Probably need to turn defaults into
lazy_gettext calls, etc. """
return _default_form_field_labels.get(form_field_label, form_field_label)
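# i18n sketch (hypothetical; assumes Flask-Babel is installed):
#     from flask.ext.babel import lazy_gettext
#     _default_form_field_labels = {
#         'email': lazy_gettext('Email Address'),
#         ...
#     }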
class Form(BaseForm):
def __init__(self, *args, **kwargs):
super(Form, self).__init__(*args, **kwargs)
class NextFormMixin(object):
next = fields.HiddenField()
def __init__(self, *args, **kwargs):
super(NextFormMixin, self).__init__(*args, **kwargs)
if not self.next.data:
self.next.data = get_redirect() or ''
def redirect(self, endpoint, **values):
if is_safe_redirect_url(self.next.data):
return redirect(self.next.data)
redirect_url = get_redirect()
return redirect(redirect_url or url_for(endpoint, **values))
class EmailFormMixin(object):
email = fields.StringField(
get_form_field_label('email'),
validators=[email_required, email_validator])
class PasswordFormMixin(object):
password = fields.PasswordField(
get_form_field_label('password'), validators=[password_required])
class NewPasswordFormMixin(object):
password = fields.PasswordField(
get_form_field_label('password'),
validators=[password_required, password_length])
class PasswordConfirmFormMixin(object):
password_confirm = fields.PasswordField(
get_form_field_label('password_confirm'),
validators=[EqualTo('password',
message='PASSWORD_CONFIRMATION_INVALID')])
class ForgotPasswordForm(Form):
user = None
email = fields.StringField(
get_form_field_label('email'),
validators=[email_required, email_validator, valid_user_email])
def __init__(self, *args, **kwargs):
super(ForgotPasswordForm, self).__init__(*args, **kwargs)
if request.method == 'GET':
self.email.data = request.args.get('email', None)
def validate(self):
if not super(ForgotPasswordForm, self).validate():
return False
if user_requires_confirmation(self.user):
self.email.errors.append(
get_message('EMAIL_ADDRESS_NOT_CONFIRMED'))
return False
return True
class LoginForm(Form,
EmailFormMixin,
PasswordFormMixin,
NextFormMixin):
user = None
remember_me = fields.BooleanField(get_form_field_label('remember_me'))
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
def validate(self):
if not super(LoginForm, self).validate():
return False
try:
self.user = User.query.filter(User.email == self.email.data).one()
except (MultipleResultsFound, NoResultFound):
self.email.errors.append(get_message('PASSWORD_INVALID'))
return False
        if not self.user.is_valid_password(self.password.data):
self.email.errors.append(get_message('PASSWORD_INVALID'))
return False
elif user_requires_confirmation(self.user):
self.email.errors.append(
get_message('EMAIL_ADDRESS_NOT_CONFIRMED'))
return False
return True
class RegisterForm(Form,
EmailFormMixin,
PasswordFormMixin,
NewPasswordFormMixin,
PasswordConfirmFormMixin):
user = None
def __init__(self, *args, **kwargs):
super(RegisterForm, self).__init__(*args, **kwargs)
def validate(self):
if not super(RegisterForm, self).validate():
return False
user = User.query.filter_by(email=self.email.data).first()
if user:
self.email.errors.append(get_message('EMAIL_ADDRESS_EXISTS'))
return False
return True
class ResetPasswordForm(Form,
NewPasswordFormMixin,
PasswordConfirmFormMixin):
pass
class SendConfirmationForm(Form):
user = None
email = fields.StringField(
get_form_field_label('email'),
validators=[email_required, email_validator, valid_user_email])
def __init__(self, *args, **kwargs):
super(SendConfirmationForm, self).__init__(*args, **kwargs)
if request.method == 'GET':
self.email.data = request.args.get('email', None)
def validate(self):
if not super(SendConfirmationForm, self).validate():
return False
if self.user.confirmed_at is not None:
self.email.errors.append(
get_message('EMAIL_ADDRESSS_ALREADY_CONFIRMED'))
return False
return True
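# Usage sketch (hypothetical view; assumes a Flask app plus a login_user()
# helper such as Flask-Login's):
#     @app.route('/login', methods=['GET', 'POST'])
#     def login():
#         form = LoginForm()
#         if form.validate_on_submit():
#             login_user(form.user, remember=form.remember_me.data)
#             return form.redirect('index')
#         return render_template('login.html', form=form)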
# vim: filetype=python
| oldhawaii/oldhawaii-metadata | www/oldhawaii_metadata/apps/users/forms.py | Python | mit | 6,532 |
#!/usr/bin/python -i
# -*- coding: utf-8 -*-
import sys, os, re
from glob import glob
#sys.path = ['/home/lcampagn/work/manis_lab/code/libs'] + sys.path
localDir = os.path.dirname(__file__)
sys.path.append(os.path.join(localDir, '..'))
sys.path.append(os.path.join(localDir, '../util'))
#from helpers import *
from PyQt4 import QtGui, QtCore
from acq4.pyqtgraph.widgets import *
from acq4.pyqtgraph.graphicsWindows import *
import Image
from acq4.util.functions import *
from scipy.ndimage import *
from scipy.ndimage import correlate
app = QtGui.QApplication([])
def dirDialog(startDir='', title="Select Directory"):
return str(QtGui.QFileDialog.getExistingDirectory(None, title, startDir))
images = []
def showImg(data=None, parent=None, title='', copy=True):
if data is None:
fileName = fileDialog()
title = fileName
data = loadImg(fileName)
    elif isinstance(data, basestring):
title = data
data = loadImg(data)
title = "Image %d: %s" % (len(images), title)
i = ImageWindow(title=title)
i.setImage(data)
images.append(i)
return i
def loadImageDir(dirName=None):
if dirName is None:
dirName = dirDialog()
# Generate list of files, sort
files = os.listdir(dirName)
files.sort()
files = filter(lambda f: os.path.splitext(f)[1][1:] in IMAGE_EXTENSIONS, files)
files = [os.path.join(dirName, f) for f in files]
return loadImageList(files)
def loadImageList(files):
# open first image to get dimensions
img = asarray(Image.open(files[0]))
# Create empty 3D array, fill first frame
data = empty((len(files),) + img.shape, dtype=img.dtype)
data[0] = img
# Fill array with image data
for i in range(1, len(files)):
img = Image.open(files[i])
data[i] = asarray(img)
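    # The transpose below reorders (frame, row, col) -> (frame, x, y) for display.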
return data.transpose((0, 2, 1))
def loadImg(fileName=None):
if fileName is None:
fileName = fileDialog()
if not os.path.isfile(fileName):
raise Exception("No file named %s", fileName)
img = Image.open(fileName)
numFrames = 0
try:
while True:
img.seek( numFrames )
numFrames += 1
except EOFError:
img.seek( 0 )
pass
im1 = numpy.asarray(img)
axes = range(0, im1.ndim)
#print axes
axes[0:2] = [1,0]
axes = tuple(axes)
#print axes
im1 = im1.transpose(axes)
if numFrames == 1:
return im1
imgArray = numpy.empty((numFrames,) + im1.shape, im1.dtype)
frame = 0
try:
while True:
img.seek( frame )
imgArray[frame] = numpy.asarray(img).transpose(axes)
frame += 1
except EOFError:
img.seek( 0 )
pass
return imgArray
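## meanDivide: divide each slice along `axis` by that slice's mean; returns a
## new float32 array unless inplace=True.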
def meanDivide(data, axis, inplace=False):
if not inplace:
d = empty(data.shape, dtype=float32)
ind = [slice(None)] * data.ndim
for i in range(0, data.shape[axis]):
ind[axis] = i
if inplace:
data[tuple(ind)] /= data[tuple(ind)].mean()
else:
d[tuple(ind)] = data[tuple(ind)].astype(float32) / data[tuple(ind)].mean()
if not inplace:
return d
if len(sys.argv) > 1:
dataDir = sys.argv[1]
else:
dataDir = dirDialog()
#dataDir = '/home/lcampagn/work/manis_lab/data/2008.09.30/record_016'
info = {}
try:
fd = open(os.path.join(dataDir, '.info'), 'r')
inf = re.sub('\r', '', fd.read())
fd.close()
exec(inf)
except:
print "Warning: could not open info file"
## Find file names
cFrames = glob(os.path.join(dataDir, '*.tif'))
dFrames = glob(os.path.join(dataDir, '*.dat'))
cFrames.sort()
dFrames.sort()
## Load images
img = loadImageList(cFrames).astype(float32)
## pre-processing
#img = medianDivide(img, axis=0)
## Determine frame times
cTimes = []
dTimes = []
for f in cFrames:
m = re.match(r'.*_([^_]+)\.tif', f)
cTimes.append(float(m.groups()[0]))
for f in dFrames:
m = re.match(r'.*_([^_]+)\.dat', f)
dTimes.append(float(m.groups()[0]))
cTimes = array(cTimes)
dTimes = array(dTimes)
startTime = cTimes.min()
cTimes -= startTime
dTimes -= startTime
## Create image window
image = showImg(img)
## Build plot window
plotWindow = QtGui.QMainWindow()
plotCW = QtGui.QScrollArea()
plotWindow.setCentralWidget(plotCW)
plotSW = QtGui.QWidget()
plotSW.setMinimumSize(300, 300)
plotSW.setSizePolicy(QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding))
plotCW.setWidget(plotSW)
plotCW.setWidgetResizable(True)
plotVBox = QtGui.QVBoxLayout()
plotSW.setLayout(plotVBox)
plotWindow.show()
plotWindow.resize(600, 400)
plots = []
## Build analysis control window
ctrlWindow = QtGui.QMainWindow()
ctrlCW = QtGui.QWidget()
ctrlWindow.setCentralWidget(ctrlCW)
ctrlVBox = QtGui.QVBoxLayout()
ctrlCW.setLayout(ctrlVBox)
ctrlRadius = QtGui.QDoubleSpinBox()
ctrlRadius.setDecimals(1)
ctrlRadius.setSingleStep(0.5)
ctrlRadius.setRange(0.5, 1000.)
ctrlGenROI = QtGui.QPushButton("Generate ROIs")
ctrlVBox.addWidget(ctrlRadius)
ctrlVBox.addWidget(ctrlGenROI)
ctrlWindow.show()
## Create physiology plot
if len(dFrames) > 0:
physPlot = PlotWidget()
plots.append(physPlot)
plotVBox.addWidget(physPlot)
## Load physiology data
physFrames = []
for df in dFrames:
d = fromfile(df, dtype=info['daq']['dtype'])
d.shape = info['daq']['shape']
physFrames.append(d)
## Decimate data, create times, and create plot
dec = 0.1
physData = zoom(hstack(physFrames), (1.0, dec))
physTimes = linspace(0, physData.shape[1] / (info['daq']['rate'] * dec) + dTimes[0], physData.shape[1])
physPlot.createPlot(physData[0], xVals=physTimes, color=(200, 200, 200))
physPlot.autoRange()
## Function for adding new ROIs
rois = []
def addROI():
global rois, img, images, plots, cTimes, plotVBox
c = intColor(len(rois))
roi = RectROI([0, 0], [5, 5], translateSnap=True, scaleSnap=True)
roi.setPen(QtGui.QPen(c))
rois.append(roi)
images[0].addItem(roi)
p = PlotWidget(None, name='ROI-%03d' % len(rois))
#p.ui.btnHorizScale.setChecked(True)
p.addCurve(ROIPlotItem(roi, img, images[0].imageItem, axes=(1,2), xVals=cTimes, color=c))
p.setSizePolicy(QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding))
p.setMinimumSize(100, 100)
plotVBox.addWidget(p)
p.line = QtGui.QGraphicsLineItem(0, 1e100, 0, -1e100)
p.addItem(p.line)
p.line.setPen(QtGui.QPen(QtGui.QColor(200, 200, 0)))
QtCore.QObject.connect(images[0].cw, QtCore.SIGNAL('timeChanged'), lambda i,t: p.line.setLine(cTimes[i], 1e100, cTimes[i], -1e100))
#QtCore.QObject.connect(images[0].cw, QtCore.SIGNAL('timeChanged'), p.scene.invalidate)
## improves performance
#images[0].ui.timeSlider.setTracking(False)
QtCore.QObject.connect(p, QtCore.SIGNAL('closed'), lambda: images[0].removeItem(roi))
#for pp in plots:
#p.view.lockXRange(pp.view)
#pp.view.lockXRange(p.view)
plots.append(p)
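## activeRegions builds a per-pixel activity map: frames are mean-normalized,
## the max-min range over time is taken, and the result is divided by a
## heavily blurred copy of itself to flatten slow illumination gradients.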
def activeRegions(img):
## normalize first
im1 = meanDivide(img, axis=0)
im2 = im1.max(axis=0).astype(float32) - im1.min(axis=0)
return im2 / gaussian_filter(im2, (15, 15))
def enhanceCells(img, radius=2.0):
"""Locate cells in image, return image. Threshold should be between 0 and 1, where 1 is most selective."""
## Create a cell template
c = generateSphere(radius)
## Create a 'center-surround' mask to enhance cell bodies
im2 = img - (img.max()+img.min())*0.5
c2 = c - c.max()*0.5
print im2.shape, c2.shape
mask = correlate(im2, c2)
#showImg(mask, title='correlation mask')
## Apply mask to original data
im3 = mask * (img-img.min())
return im3
#def labelPeaks(img, threshold=0.5):
### Threshold enhanced image
#img -= img.min() + (img.max()-img.min()) * threshold
#img[img < 0] = 0.
##showImg(img, title="Masked, thresholded image")
#l = label(img)[0]
#return l
def buildROIs():
global enhImage, plots, cTimes
img = enhImage.data.copy()
## Threshold enhanced image
img -= enhImage.blackLevel()
img[img < 0] = 0.
#showImg(img, title="Masked, thresholded image")
labels = label(img)[0]
for l in find_objects(labels):
addROI()
r = rois[-1]
r.setPos([l[0].start-2, l[1].start-2])
r.setSize([5, 5])
plots[0].setXRange(0, cTimes[-1])
def export():
data = empty((len(plots)+1, img.shape[0]))
data[0] = cTimes
roiData = []
for i in range(len(plots)):
data[i+1] = plots[i].plots[0].getRoiData()
roiData.append([rois[i].pos().x(), rois[i].pos().y(), rois[i].boundingRect().height(), rois[i].boundingRect().width()])
f = saveDialog()
writeCsv(data.transpose(), f)
fd = open(f + '.info', 'w')
for rd in roiData:
fd.write(' '.join(map(str, rd)) + '\n')
fd.close()
act = activeRegions(img)
arImg = showImg(act, title='active regions')
enhImage = showImg(zeros((2, 2)), "Active regions (enhanced)")
def updateEnhancedImage(r):
global act, enhImage
enh = enhanceCells(act, radius=r)
enhImage.setImage(enh, autoRange=True)
enhImage.data = enh
#showImg(enh, title="Active region (Enhanced for cells)")
QtCore.QObject.connect(ctrlRadius, QtCore.SIGNAL('valueChanged(double)'), updateEnhancedImage)
ctrlRadius.setValue(3.0)
QtCore.QObject.connect(ctrlGenROI, QtCore.SIGNAL('clicked()'), buildROIs)
#l = labelPeaks(enh, threshold=0.4)
#lcImg = showImg(l, title='labeled cells')
#buildROIs(l)
| hiuwo/acq4 | acq4/analysis/old/caImagingAnalysis.py | Python | mit | 9,655 |
#!/usr/bin/python
# coding: utf-8
# Copyright 2013 The Font Bakery Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# See AUTHORS.txt for the list of Authors and LICENSE.txt for the License.
#
# OVERVIEW
#
# This script calculates the visual weight and width of fonts.
# It runs on a folder of TTFs.
#
# For width, it just measures the width of how a particular piece of text renders.
# For weight, it measures the darkness of a piece of text.
#
# USAGE
#
# python compute_font_weight_and_width.py --metric=width --folder="ttfs/*.ttf" --debug=True
#
# - Valid values for the metric are 'width' and 'weight'
# - If the debug property is set to True, a server will spin up with images for visual inspection.
# Otherwise, the values (from 0.0-1.0) will be output to the terminal.
#
# DEPENDENCIES
#
# The script depends on the Python Imaging Library (PIL) <http://www.pythonware.com/products/pil/>
# If you have pip <https://pypi.python.org/pypi/pip> installed, run:
# > sudo pip install pil
#
from PIL import ImageFont
from PIL import Image
from PIL import ImageDraw
import argparse
import glob
import os
import sys
import BaseHTTPServer
import SocketServer
import StringIO
# The font size used to test for weight and width.
FONT_SIZE = 30
# The text used to test weight and width. Note that this could be
# problematic if a given font doesn't have latin support.
TEXT = "AaBbCcDdEeFfGgHhIiJjKkLlMmNnOoPpQqRrSsTtUuVvXxYyZz"
# The port on which the debug server will be hosted.
PORT = 8080
# Fonts that cause problems: any filenames containing these letters
# will be skipped.
# TODO: Investigate why these don't work.
BLACKLIST = [
"Angkor",
"Fasthand",
"Noto",
"Droid"
]
DEBUG_TEMPLATE = """
<!doctype html>
<html>
<head>
<style>
html, body{
font-family:Arial, sans-serif;
}
div.filename, div.darkness{
width:100px;
color:gray;
font-size:10px;
}
img{
margin-left:150px;
}
</style>
</head>
<body>
%s
</body>
</html>
"""
# When outputing the debug HTML, this is used to show a single font.
ENTRY_TEMPLATE = """
<div>
<div class='darkness'>%s</div>
<div class='filename'>%s</div>
<img src="data:image/png;base64,%s" />
</div>
"""
def main():
parser = argparse.ArgumentParser(description='Script to calculate font weights and widths')
parser.add_argument("-f", "--folder", default="*", help="The pattern to match for finding ttfs, eg 'folder_with_fonts/*.ttf'.")
parser.add_argument("-d", "--debug", default=False, help="Debug mode, spins up a server to validate results visually.")
parser.add_argument("-m", "--metric", default="weight", help="What property to measure; either 'weight' or 'width'.")
args = parser.parse_args()
properties = []
fontfiles = glob.glob(args.folder)
for fontfile in fontfiles:
if is_blacklisted(fontfile):
print >> sys.stderr, "%s is blacklisted." % fontfile
continue
#try:
if args.metric == "weight":
properties.append(get_darkness(fontfile))
elif args.metric == "width":
properties.append(get_width(fontfile))
#except:
# print >> sys.stderr, "Couldn't calculate darkness of %s." % fontfile
if args.metric == "width":
normalize_values(properties)
if args.debug:
start_debug_server(properties)
else:
dump_values(properties)
# Normalizes a set of values from 0 - 1.0
def normalize_values(properties):
max_value = 0.0
for i in range(len(properties)):
val = float(properties[i]['value'])
max_value = max(max_value, val)
for i in range(len(properties)):
properties[i]['value'] /= max_value
# Dump the values to the terminal.
def dump_values(properties):
for font in sorted(properties, key=lambda x: x['value']):
print font['fontfile'] + "," + str(font['value'])
# Brings up a HTTP server to host a page for visual inspection
def start_debug_server(properties):
template_contents = ""
for font in sorted(properties, key=lambda x: x['value']):
metric = font['value']
filename = font['fontfile']
base64img = font['base64img']
if metric == 0.0:
print >> sys.stderr, "%s has no metric." % filename
continue
template_contents += ENTRY_TEMPLATE % (metric, filename, base64img)
debug_page_html = DEBUG_TEMPLATE % template_contents
# Handler that responds to all requests with a single file.
class DebugHTTPHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_HEAD(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.end_headers()
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write(debug_page_html)
httpd = SocketServer.TCPServer(("", PORT), DebugHTTPHandler)
print "Debug page can be seen at http://127.0.0.1:" + str(PORT)
print "Kill the server with Ctrl+C"
httpd.serve_forever()
# Returns whether a font is on the blacklist.
def is_blacklisted(filename):
for name in BLACKLIST:
if name in filename:
return True
return False
# Returns the width, given a filename of a ttf.
# This is in pixels so should be normalized.
def get_width(fontfile):
# Render the test text using the font onto an image.
font = ImageFont.truetype(fontfile, FONT_SIZE)
text_width, text_height = font.getsize(TEXT)
img = Image.new('RGBA', (text_width, text_height))
draw = ImageDraw.Draw(img)
draw.text((0,0), TEXT, font=font, fill=(0,0,0))
return {'value': text_width, 'fontfile': fontfile, 'base64img': get_base64_image(img)}
# Returns the darkness, given a filename of a ttf.
def get_darkness(fontfile):
# Render the test text using the font onto an image.
font = ImageFont.truetype(fontfile, FONT_SIZE)
text_width, text_height = font.getsize(TEXT)
img = Image.new('RGBA', (text_width, text_height))
draw = ImageDraw.Draw(img)
draw.text((0,0), TEXT, font=font, fill=(0,0,0))
# Calculate the average darkness.
histogram = img.histogram()
alpha = histogram[768:]
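    # PIL's histogram() returns 256 bins per band; for an RGBA image the
    # alpha band occupies bins 768-1023, selected by the slice above.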
avg = 0.0
for i, value in enumerate(alpha):
avg += (i / 255.0) * value
try:
darkness = avg / (text_width * text_height)
except:
darkness = 0.0
# Weight the darkness by x-height.
x_height = get_x_height(fontfile)
darkness *= (x_height / FONT_SIZE)
return {'value': darkness, 'fontfile': fontfile, 'base64img': get_base64_image(img)}
# Get the base 64 representation of an image, to use for visual testing.
def get_base64_image(img):
output = StringIO.StringIO()
img.save(output, "PNG")
base64img = output.getvalue().encode("base64")
output.close()
return base64img
# Returns the height of the lowercase "x" in a font.
def get_x_height(fontfile):
font = ImageFont.truetype(fontfile, FONT_SIZE)
_, x_height = font.getsize("x")
return x_height
if __name__ == "__main__":
main()
| jessamynsmith/fontbakery | tools/compute_font_weight_and_width.py | Python | apache-2.0 | 7,384 |
#!/usr/bin/env python
# coding:utf-8
import redis
from rq import Queue
from task.worker.PushWorker import push_messenger
def main(msg, config, silent=False):
"""
    Enqueue a push job on the 'low' priority queue.
    :param msg: message payload handed to push_messenger
    :param config: config mapping with the queue DSN at config["queue"]["dsn"]
    :param silent: when True, return the enqueued job instead of printing it
    :return: the rq job when silent is True, otherwise None
"""
queue_dsn = config["queue"]["dsn"]
redis_conn = redis.from_url(queue_dsn)
q = Queue('low', connection=redis_conn)
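    # result_ttl=60 keeps the finished job's return value in Redis for 60s.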
ret = q.enqueue(push_messenger, msg, result_ttl=60)
if silent is True:
return ret
else:
print ret
if __name__ == '__main__':
from lib.units import this_file_dir, config_reader
    # NOTE: 'test message' is a placeholder for manual runs; main() requires
    # both a message and a config, so passing only the config would raise.
    main('test message', config_reader("mServiceWorker", "PushWorker", this_file_dir()))
| smices/mWorkerService | src/task/scheduler/PushWorker.py | Python | mit | 692 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from celery_proj.celery import app
from sharefun import create_app
from sharefun.models import db, Recommendation, User, Category, Comment, Work
from sharefun.script.spider.crawlDoubanWorkInfo import main
from sharefun.config import load_config
from sharefun import get_mail_handler
import os
from subprocess import call
from datetime import datetime, timedelta
from urllib import urlretrieve
import requests
config = load_config()
search_movie_url = "http://api.douban.com/v2/movie/search?q=%s"
movie_url = "http://api.douban.com/v2/movie/subject/%s"
@app.task
def crawller():
"""根据最新推荐抓取相应作品信息
"""
names_file = open(config.NAMES_PATH, 'a+')
flask_app = create_app()
with flask_app.app_context():
for recommendation in Recommendation.query.filter_by(status_id=2):
names_file.write(
recommendation.category.name + ':' + recommendation.name + '\n')
names_file.close()
main(config.NAMES_PATH, config.SUCCESSFUL_NAMES_PATH,
config.FAILED_NAMES_PATH, config.WEBPAGES_PATH, config.COVERS_FOLDER_PATH)
with flask_app.app_context():
"""打补丁
目前只发现部分电影信息直接通过crawlDoubanWorkInfo.py无法抓取,需登陆才能在搜索框中搜索到
通过豆瓣电影api的方式查询到相关电影信息
"""
for recommendation in Recommendation.query.filter_by(status_id=2):
movies_info = requests.get(
search_movie_url % recommendation.name).json()
if movies_info:
subjects = movies_info['subjects']
for subject in subjects:
if subject['title'] == recommendation.name:
                    # first look up the movie id for the given title
movie_id = subject['id']
if movie_id:
                        # query the movie info directly via the API using the movie id
movie_info = requests.get(
movie_url % movie_id).json()
cover_path = os.path.join(
os.path.abspath(os.path.dirname(__name__)), 'sharefun/static/covers', 'movie_' + movie_id + '.jpg')
                        # download the cover image
if not os.path.exists(cover_path):
urlretrieve(
movie_info['images']['large'], cover_path)
work = Work(title=recommendation.name, director=movie_info['directors'][0]['name'], genre='/'.join(movie_info['genres']), score=movie_info['rating'][
'average'], desc=movie_info['summary'], url=movie_info['alt'], cover_url=movie_info['images']['large'], cover_path=cover_path[cover_path.find('static/') + 7:], created=datetime.utcnow(), cate_id=recommendation.cate_id)
db.session.add(work)
recommendation.status_id = 3
db.session.add(recommendation)
db.session.commit()
break
@app.task
def send_mail():
flask_app = create_app()
    info = 'Newly registered users:\n'
with flask_app.app_context():
today = str(datetime.utcnow()).split(' ')[0] + ' 00:00:00.000000'
for user in User.query.filter(User.created > today):
info += user.email + '\n'
    if info != 'Newly registered users:\n':
import logging
logger = logging.getLogger('sf')
logger.addHandler(get_mail_handler())
logger.error(info)
@app.task
def backup():
"""备份mysql数据库"""
t = datetime.now().strftime('%y-%m-%d_%H.%M.%S')
f = 'backup-sf-%s.sql' % t
call("mysqldump -u%s -p%s --skip-opt --add-drop-table --default-character-set=utf8 --quick sf > %s" %
(config.DB_USER, config.DB_PASSWORD, os.path.join('/home/frank/sf-backup', f)), shell=True)
# excludes 'reading'; only 'read' and 'wish'
collections_url = "https://api.douban.com/v2/book/user/%s/collections"
collection_url = "https://api.douban.com/v2/book/%s/collection"
@app.task
def sync_with_douban():
"""三件事:一从豆瓣上同步评论到本地;二将本地作品的评论同步到豆瓣;三:将本地推荐的作品同步到豆瓣
一不需要access_token,二三需要
策略:豆瓣本次登陆将access_token存到user的access_token字段中
access_token有效期3920s > 1h,定时任务1h1次,在豆瓣用户登陆账户一小时之类利用有效期内的access_token抓取其评论数据
分析:豆瓣用户每次登陆之后在本站的评论或者推荐信息 与 在豆瓣上的评论信息,在一小时之类必定会与豆瓣进行有效同步
"""
flask_app = create_app()
with flask_app.app_context():
one_hour_ago = datetime.utcnow() + timedelta(hours=-1)
# print one_hour_ago
for user in User.query.filter_by(is_activated=1):
# print user.douban_abbr
# xiaoaifansion
            # no Douban Book permissions are required for this
collections_info = requests.get(
collections_url % user.douban_abbr).json()
if collections_info:
recommendations = Recommendation.query.filter_by(status_id=3).filter_by(
user_id=user.id).filter(Recommendation.finished > one_hour_ago).all()
work_dict = {}
for recommendation in recommendations:
work_dict[recommendation.work.url.strip(
'/').split('/')[-1]] = recommendation.work
# print work_dict
# {u'1052241': Work 设计模式}
collection_ids = []
collections = collections_info["collections"]
if collections:
comments = Comment.query.filter_by(user_id=user.id).filter(
Comment.created > one_hour_ago).all()
# crawl comments
                # among works marked read-and-commented on Douban, pick those already listed locally and pull the Douban comments into the local DB
for collection in collections:
collection_ids.append(collection['book']['id'])
                    # work already listed locally
work = Work.query.filter_by(
url=collection['book']['alt']).first()
if work:
                        # read and commented on Douban
if collection['status'] == 'read' and collection['comment']:
# comment = Comment.query.filter_by(
# content=collection['comment']).first()
comment = Comment.query.filter_by(
user_id=user.id).filter_by(work_id=work.id).first()
                            # the comment has not been crawled yet; add it as a new one
if not comment:
                                # Douban comment times are UTC+8; convert to UTC before storing
comment = Comment(
content=collection['comment'], user_id=user.id, work_id=work.id, created=datetime.strptime(collection['updated'], "%Y-%m-%d %H:%M:%S") + timedelta(hours=-8))
                            else:  # the comment already exists locally; just update its content
comment.content = collection['comment']
# print comment.content
                                # for testing
db.session.add(comment)
db.session.commit()
access_token = user.access_token
headers = {
"Authorization": "Bearer %s" % access_token}
# print comments
# [Comment 20, Comment 21]
# print config.DOUBAN_CLIENT_ID
# 088cbee5d793b72a19a63cb186bb257e
# print access_token
# 4b02cc1fdfae6fa9e108645f9f0b4efb
# print headers
# {'Authorization': u'Bearer 4b02cc1fdfae6fa9e108645f9f0b4efb'}
if access_token:
for collection in collections:
# push comments
                        # sync local comments on listed works to Douban
                        # needs the proper OAuth scope; used to fail
                        # push confirmed working 2015-01-26
for comment in comments:
if comment.user_id == user.id and collection['book']['alt'] == comment.work.url:
data = {
'status': collection['status'],
'comment': comment.content,
'scope': 'douban_basic_common'
}
res = requests.put(
collection_url % collection['book']['id'], data, headers=headers)
# print res.status_code
# 202
# print res.content
# print comment
break
# push recommendations
                    # sync works recommended on this site to Douban collections
                    # needs the proper OAuth scope; used to fail
                    # push confirmed working 2015-01-26
# print collection_ids
# [u'1052241', u'1858513', u'6709783', u'2567698', u'1230413', u'1788421', u'24738302', u'6021440', u'1084336', u'3117898', u'3688489', u'3323633', u'1894695', u'1786387', u'2209702', u'6709809', u'11229716', u'25814739', u'25863621', u'25900403']
for work_id, work in work_dict.iteritems():
if not work_id in collection_ids:
data = {
'status': 'read',
'comment': work.recommendation.recomm_reason,
'scope': 'douban_basic_common'
}
res = requests.post(
collection_url % work_id, data, headers=headers)
# print res.status_code
# print res.content
@app.task
def add(x, y):
return x + y
| Fansion/sharefun | celery_proj/tasks.py | Python | mit | 10,916 |
#!/usr/bin/env python
from __future__ import print_function
import linchpin.FilterUtils.FilterUtils as filter_utils
class FilterModule(object):
    ''' A filter plugin exposing map_results to Ansible templates '''
def filters(self):
return {
'map_results': filter_utils.map_results
}
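# Jinja usage sketch (hypothetical variable name):
#   {{ provision_results | map_results }}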
| samvarankashyap/linch-pin | linchpin/provision/roles/azure/filter_plugins/map_results.py | Python | gpl-3.0 | 292 |
#!/usr/bin/env python
import io
import os
import re
import sys
from setuptools import setup, find_packages
PACKAGE = "lancet"
if sys.argv[-1] == "publish":
os.system("python setup.py sdist bdist_wheel upload")
sys.exit()
class Setup(object):
@staticmethod
def read(fname, fail_silently=False):
"""
Read the content of the given file. The path is evaluated from the
directory containing this file.
"""
try:
filepath = os.path.join(os.path.dirname(__file__), fname)
with io.open(filepath, "rt", encoding="utf8") as f:
return f.read()
except Exception:
if not fail_silently:
raise
return ""
@staticmethod
def requirements(fname):
"""
Create a list of requirements from the output of the pip freeze command
saved in a text file.
"""
packages = Setup.read(fname, fail_silently=True).split("\n")
packages = (p.strip() for p in packages)
packages = (p for p in packages if p and not p.startswith("#"))
packages = (p for p in packages if p and not p.startswith("https://"))
return list(packages)
@staticmethod
def get_files(*bases):
"""
List all files in a data directory.
"""
for base in bases:
basedir, _ = base.split(".", 1)
base = os.path.join(os.path.dirname(__file__), *base.split("."))
rem = len(os.path.dirname(base)) + len(basedir) + 2
for root, dirs, files in os.walk(base):
for name in files:
yield os.path.join(basedir, root, name)[rem:]
@staticmethod
def version():
data = Setup.read(os.path.join(PACKAGE, "__init__.py"))
version = (
re.search(r'__version__\s*=\s*u?"([^"]+)"', data).group(1).strip()
)
return version
@staticmethod
def url():
data = Setup.read(os.path.join(PACKAGE, "__init__.py"))
version = (
re.search(r'__url__\s*=\s*u?"([^"]+)"', data).group(1).strip()
)
return version
@staticmethod
def longdesc():
return Setup.read("README.rst") + "\n\n" + Setup.read("HISTORY.rst")
@staticmethod
def test_links():
# Test if hardlinks work. This is a workaround until
# http://bugs.python.org/issue8876 is solved
if hasattr(os, "link"):
tempfile = __file__ + ".tmp"
try:
os.link(__file__, tempfile)
except OSError as e:
if e.errno == 1: # Operation not permitted
del os.link
else:
raise
finally:
if os.path.exists(tempfile):
os.remove(tempfile)
Setup.test_links()
setup(
name=PACKAGE,
version=Setup.version(),
author="Jonathan Stoppani",
author_email="jonathan@stoppani.name",
include_package_data=True,
zip_safe=False,
url=Setup.url(),
license="MIT",
packages=find_packages(),
package_dir={PACKAGE: PACKAGE},
description="Lancet",
install_requires=Setup.requirements("requirements.txt"),
long_description=Setup.longdesc(),
entry_points=Setup.read("entry-points.ini", True),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
)
| GaretJax/lancet | setup.py | Python | mit | 3,730 |
import datetime
import requests
import time
__author__ = 'ApigeeCorporation'
def total_milliseconds(td):
return (td.microseconds + td.seconds * 1000000) / 1000
url_template = "http://localhost:9200/_cat/pending_tasks?v'"
SLEEP_TIME = 3
while True:
try:
r = requests.get(url=url_template)
lines = r.text.split('\n')
print '\n-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-'
print '+++++++++++++++++++++++++++++++++++++++++++++++++++++++++'
print datetime.datetime.utcnow()
if len(lines) > 1:
print r.text
else:
print 'None'
print '-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-'
print '-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-\n'
    except requests.exceptions.RequestException:
        pass
time.sleep(SLEEP_TIME)
| jwest-apigee/usergrid-util-python | es_tools/monitor_tasks.py | Python | mit | 855 |
# ext/declarative/api.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Public API functions and helpers for declarative."""
from ...schema import Table, MetaData
from ...orm import synonym as _orm_synonym, mapper,\
comparable_property,\
interfaces
from ...orm.util import polymorphic_union, _mapper_or_none
from ... import exc
import weakref
from .base import _as_declarative, \
_declarative_constructor,\
_MapperConfig, _add_attribute
def instrument_declarative(cls, registry, metadata):
"""Given a class, configure the class declaratively,
using the given registry, which can be any dictionary, and
MetaData object.
"""
if '_decl_class_registry' in cls.__dict__:
raise exc.InvalidRequestError(
"Class %r already has been "
"instrumented declaratively" % cls)
cls._decl_class_registry = registry
cls.metadata = metadata
_as_declarative(cls, cls.__name__, cls.__dict__)
def has_inherited_table(cls):
"""Given a class, return True if any of the classes it inherits from has a
mapped table, otherwise return False.
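    For example, inside a ``@declared_attr`` this lets a mixin give each class
    its own table only when one is not already inherited, e.g.::
        class Tablename(object):
            @declared_attr
            def __tablename__(cls):
                if has_inherited_table(cls):
                    return None
                return cls.__name__.lower()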
"""
for class_ in cls.__mro__[1:]:
if getattr(class_, '__table__', None) is not None:
return True
return False
class DeclarativeMeta(type):
def __init__(cls, classname, bases, dict_):
if '_decl_class_registry' not in cls.__dict__:
_as_declarative(cls, classname, cls.__dict__)
type.__init__(cls, classname, bases, dict_)
def __setattr__(cls, key, value):
_add_attribute(cls, key, value)
def synonym_for(name, map_column=False):
"""Decorator, make a Python @property a query synonym for a column.
A decorator version of :func:`~sqlalchemy.orm.synonym`. The function being
decorated is the 'descriptor', otherwise passes its arguments through to
synonym()::
@synonym_for('col')
@property
def prop(self):
return 'special sauce'
The regular ``synonym()`` is also usable directly in a declarative setting
and may be convenient for read/write properties::
prop = synonym('col', descriptor=property(_read_prop, _write_prop))
"""
def decorate(fn):
return _orm_synonym(name, map_column=map_column, descriptor=fn)
return decorate
def comparable_using(comparator_factory):
"""Decorator, allow a Python @property to be used in query criteria.
This is a decorator front end to
:func:`~sqlalchemy.orm.comparable_property` that passes
through the comparator_factory and the function being decorated::
@comparable_using(MyComparatorType)
@property
def prop(self):
return 'special sauce'
The regular ``comparable_property()`` is also usable directly in a
declarative setting and may be convenient for read/write properties::
prop = comparable_property(MyComparatorType)
"""
def decorate(fn):
return comparable_property(comparator_factory, fn)
return decorate
class declared_attr(interfaces._MappedAttribute, property):
"""Mark a class-level method as representing the definition of
a mapped property or special declarative member name.
@declared_attr turns the attribute into a scalar-like
property that can be invoked from the uninstantiated class.
Declarative treats attributes specifically marked with
@declared_attr as returning a construct that is specific
to mapping or declarative table configuration. The name
of the attribute is that of what the non-dynamic version
of the attribute would be.
@declared_attr is more often than not applicable to mixins,
to define relationships that are to be applied to different
implementors of the class::
class ProvidesUser(object):
"A mixin that adds a 'user' relationship to classes."
@declared_attr
def user(self):
return relationship("User")
It also can be applied to mapped classes, such as to provide
a "polymorphic" scheme for inheritance::
class Employee(Base):
id = Column(Integer, primary_key=True)
type = Column(String(50), nullable=False)
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
@declared_attr
def __mapper_args__(cls):
if cls.__name__ == 'Employee':
return {
"polymorphic_on":cls.type,
"polymorphic_identity":"Employee"
}
else:
return {"polymorphic_identity":cls.__name__}
.. versionchanged:: 0.8 :class:`.declared_attr` can be used with
non-ORM or extension attributes, such as user-defined attributes
or :func:`.association_proxy` objects, which will be assigned
to the class at class construction time.
"""
def __init__(self, fget, *arg, **kw):
super(declared_attr, self).__init__(fget, *arg, **kw)
self.__doc__ = fget.__doc__
def __get__(desc, self, cls):
return desc.fget(cls)
def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
name='Base', constructor=_declarative_constructor,
class_registry=None,
metaclass=DeclarativeMeta):
"""Construct a base class for declarative class definitions.
The new base class will be given a metaclass that produces
appropriate :class:`~sqlalchemy.schema.Table` objects and makes
the appropriate :func:`~sqlalchemy.orm.mapper` calls based on the
information provided declaratively in the class and any subclasses
of the class.
:param bind: An optional
:class:`~sqlalchemy.engine.base.Connectable`, will be assigned
the ``bind`` attribute on the :class:`~sqlalchemy.MetaData`
instance.
:param metadata:
An optional :class:`~sqlalchemy.MetaData` instance. All
:class:`~sqlalchemy.schema.Table` objects implicitly declared by
subclasses of the base will share this MetaData. A MetaData instance
will be created if none is provided. The
:class:`~sqlalchemy.MetaData` instance will be available via the
`metadata` attribute of the generated declarative base class.
:param mapper:
An optional callable, defaults to :func:`~sqlalchemy.orm.mapper`. Will
be used to map subclasses to their Tables.
:param cls:
Defaults to :class:`object`. A type to use as the base for the generated
declarative base class. May be a class or tuple of classes.
:param name:
Defaults to ``Base``. The display name for the generated
class. Customizing this is not required, but can improve clarity in
tracebacks and debugging.
:param constructor:
Defaults to
:func:`~sqlalchemy.ext.declarative._declarative_constructor`, an
__init__ implementation that assigns \**kwargs for declared
fields and relationships to an instance. If ``None`` is supplied,
no __init__ will be provided and construction will fall back to
cls.__init__ by way of the normal Python semantics.
:param class_registry: optional dictionary that will serve as the
registry of class names-> mapped classes when string names
are used to identify classes inside of :func:`.relationship`
and others. Allows two or more declarative base classes
to share the same registry of class names for simplified
inter-base relationships.
:param metaclass:
Defaults to :class:`.DeclarativeMeta`. A metaclass or __metaclass__
compatible callable to use as the meta type of the generated
declarative base class.
"""
lcl_metadata = metadata or MetaData()
if bind:
lcl_metadata.bind = bind
if class_registry is None:
class_registry = weakref.WeakValueDictionary()
bases = not isinstance(cls, tuple) and (cls,) or cls
class_dict = dict(_decl_class_registry=class_registry,
metadata=lcl_metadata)
if constructor:
class_dict['__init__'] = constructor
if mapper:
class_dict['__mapper_cls__'] = mapper
return metaclass(name, bases, class_dict)
class ConcreteBase(object):
"""A helper class for 'concrete' declarative mappings.
:class:`.ConcreteBase` will use the :func:`.polymorphic_union`
function automatically, against all tables mapped as a subclass
to this class. The function is called via the
``__declare_last__()`` function, which is essentially
a hook for the :func:`.MapperEvents.after_configured` event.
:class:`.ConcreteBase` produces a mapped
table for the class itself. Compare to :class:`.AbstractConcreteBase`,
which does not.
Example::
from sqlalchemy.ext.declarative import ConcreteBase
class Employee(ConcreteBase, Base):
__tablename__ = 'employee'
employee_id = Column(Integer, primary_key=True)
name = Column(String(50))
__mapper_args__ = {
'polymorphic_identity':'employee',
'concrete':True}
class Manager(Employee):
__tablename__ = 'manager'
employee_id = Column(Integer, primary_key=True)
name = Column(String(50))
manager_data = Column(String(40))
__mapper_args__ = {
'polymorphic_identity':'manager',
'concrete':True}
"""
@classmethod
def _create_polymorphic_union(cls, mappers):
return polymorphic_union(dict(
(mp.polymorphic_identity, mp.local_table)
for mp in mappers
), 'type', 'pjoin')
@classmethod
def __declare_last__(cls):
m = cls.__mapper__
if m.with_polymorphic:
return
mappers = list(m.self_and_descendants)
pjoin = cls._create_polymorphic_union(mappers)
m._set_with_polymorphic(("*", pjoin))
m._set_polymorphic_on(pjoin.c.type)
class AbstractConcreteBase(ConcreteBase):
"""A helper class for 'concrete' declarative mappings.
:class:`.AbstractConcreteBase` will use the :func:`.polymorphic_union`
function automatically, against all tables mapped as a subclass
to this class. The function is called via the
``__declare_last__()`` function, which is essentially
a hook for the :func:`.MapperEvents.after_configured` event.
:class:`.AbstractConcreteBase` does not produce a mapped
table for the class itself. Compare to :class:`.ConcreteBase`,
which does.
Example::
        from sqlalchemy.ext.declarative import AbstractConcreteBase
class Employee(AbstractConcreteBase, Base):
pass
class Manager(Employee):
__tablename__ = 'manager'
employee_id = Column(Integer, primary_key=True)
name = Column(String(50))
manager_data = Column(String(40))
__mapper_args__ = {
'polymorphic_identity':'manager',
'concrete':True}
"""
__abstract__ = True
@classmethod
def __declare_last__(cls):
if hasattr(cls, '__mapper__'):
return
# can't rely on 'self_and_descendants' here
# since technically an immediate subclass
# might not be mapped, but a subclass
# may be.
mappers = []
stack = list(cls.__subclasses__())
while stack:
klass = stack.pop()
stack.extend(klass.__subclasses__())
mn = _mapper_or_none(klass)
if mn is not None:
mappers.append(mn)
pjoin = cls._create_polymorphic_union(mappers)
cls.__mapper__ = m = mapper(cls, pjoin, polymorphic_on=pjoin.c.type)
for scls in cls.__subclasses__():
sm = _mapper_or_none(scls)
            if sm is not None and sm.concrete and cls in scls.__bases__:
sm._set_concrete_base(m)
class DeferredReflection(object):
"""A helper class for construction of mappings based on
a deferred reflection step.
Normally, declarative can be used with reflection by
setting a :class:`.Table` object using autoload=True
as the ``__table__`` attribute on a declarative class.
The caveat is that the :class:`.Table` must be fully
reflected, or at the very least have a primary key column,
at the point at which a normal declarative mapping is
constructed, meaning the :class:`.Engine` must be available
at class declaration time.
The :class:`.DeferredReflection` mixin moves the construction
of mappers to be at a later point, after a specific
method is called which first reflects all :class:`.Table`
objects created so far. Classes can define it as such::
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.declarative import DeferredReflection
Base = declarative_base()
class MyClass(DeferredReflection, Base):
__tablename__ = 'mytable'
Above, ``MyClass`` is not yet mapped. After a series of
classes have been defined in the above fashion, all tables
can be reflected and mappings created using
:meth:`.DeferredReflection.prepare`::
engine = create_engine("someengine://...")
DeferredReflection.prepare(engine)
The :class:`.DeferredReflection` mixin can be applied to individual
classes, used as the base for the declarative base itself,
or used in a custom abstract class. Using an abstract base
allows that only a subset of classes to be prepared for a
particular prepare step, which is necessary for applications
that use more than one engine. For example, if an application
has two engines, you might use two bases, and prepare each
separately, e.g.::
class ReflectedOne(DeferredReflection, Base):
__abstract__ = True
class ReflectedTwo(DeferredReflection, Base):
__abstract__ = True
class MyClass(ReflectedOne):
__tablename__ = 'mytable'
class MyOtherClass(ReflectedOne):
__tablename__ = 'myothertable'
class YetAnotherClass(ReflectedTwo):
__tablename__ = 'yetanothertable'
# ... etc.
Above, the class hierarchies for ``ReflectedOne`` and
``ReflectedTwo`` can be configured separately::
ReflectedOne.prepare(engine_one)
ReflectedTwo.prepare(engine_two)
.. versionadded:: 0.8
"""
@classmethod
def prepare(cls, engine):
"""Reflect all :class:`.Table` objects for all current
:class:`.DeferredReflection` subclasses"""
to_map = [m for m in _MapperConfig.configs.values()
if issubclass(m.cls, cls)]
for thingy in to_map:
cls._sa_decl_prepare(thingy.local_table, engine)
thingy.map()
@classmethod
def _sa_decl_prepare(cls, local_table, engine):
# autoload Table, which is already
# present in the metadata. This
# will fill in db-loaded columns
# into the existing Table object.
if local_table is not None:
Table(local_table.name,
local_table.metadata,
extend_existing=True,
autoload_replace=False,
autoload=True,
autoload_with=engine,
schema=local_table.schema)
| rclmenezes/sqlalchemy | lib/sqlalchemy/ext/declarative/api.py | Python | mit | 15,843 |
# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
# copyright 2003-2010 Sylvain Thenault, all rights reserved.
# contact mailto:thenault@gmail.com
#
# This file is part of logilab-astng.
#
# logilab-astng is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# logilab-astng is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
"""tests for specific behaviour of astng scoped nodes (i.e. module, class and
function)
"""
import sys
from os.path import join, abspath, dirname
from logilab.common.testlib import TestCase, unittest_main
from logilab.astng import builder, nodes, scoped_nodes, \
BUILTINS_MODULE, InferenceError, NotFoundError
from logilab.astng.bases import Instance, BoundMethod, UnboundMethod
abuilder = builder.ASTNGBuilder()
DATA = join(dirname(abspath(__file__)), 'data')
REGRTEST_DATA = join(dirname(abspath(__file__)), 'regrtest_data')
MODULE = abuilder.file_build(join(DATA, 'module.py'), 'data.module')
MODULE2 = abuilder.file_build(join(DATA, 'module2.py'), 'data.module2')
NONREGR = abuilder.file_build(join(DATA, 'nonregr.py'), 'data.nonregr')
PACK = abuilder.file_build(join(DATA, '__init__.py'), 'data')
def _test_dict_interface(self, node, test_attr):
self.assert_(node[test_attr] is node[test_attr])
self.assert_(test_attr in node)
node.keys()
node.values()
node.items()
iter(node)
class ModuleNodeTC(TestCase):
def test_special_attributes(self):
self.assertEqual(len(MODULE.getattr('__name__')), 1)
self.assertIsInstance(MODULE.getattr('__name__')[0], nodes.Const)
self.assertEqual(MODULE.getattr('__name__')[0].value, 'data.module')
self.assertEqual(len(MODULE.getattr('__doc__')), 1)
self.assertIsInstance(MODULE.getattr('__doc__')[0], nodes.Const)
self.assertEqual(MODULE.getattr('__doc__')[0].value, 'test module for astng\n')
self.assertEqual(len(MODULE.getattr('__file__')), 1)
self.assertIsInstance(MODULE.getattr('__file__')[0], nodes.Const)
self.assertEqual(MODULE.getattr('__file__')[0].value, join(DATA, 'module.py'))
self.assertEqual(len(MODULE.getattr('__dict__')), 1)
self.assertIsInstance(MODULE.getattr('__dict__')[0], nodes.Dict)
self.assertRaises(NotFoundError, MODULE.getattr, '__path__')
self.assertEqual(len(PACK.getattr('__path__')), 1)
self.assertIsInstance(PACK.getattr('__path__')[0], nodes.List)
def test_dict_interface(self):
_test_dict_interface(self, MODULE, 'YO')
def test_getattr(self):
yo = MODULE.getattr('YO')[0]
self.assertIsInstance(yo, nodes.Class)
self.assertEqual(yo.name, 'YO')
red = MODULE.igetattr('redirect').next()
self.assertIsInstance(red, nodes.Function)
self.assertEqual(red.name, 'four_args')
spawn = MODULE.igetattr('spawn').next()
self.assertIsInstance(spawn, nodes.Class)
self.assertEqual(spawn.name, 'Execute')
        # resolve package redirection
sys.path.insert(1, DATA)
mod = abuilder.file_build(join(DATA, 'appl/myConnection.py'),
'appl.myConnection')
try:
ssl = mod.igetattr('SSL1').next()
cnx = ssl.igetattr('Connection').next()
self.assertEqual(cnx.__class__, nodes.Class)
self.assertEqual(cnx.name, 'Connection')
self.assertEqual(cnx.root().name, 'SSL1.Connection1')
finally:
del sys.path[1]
self.assertEqual(len(NONREGR.getattr('enumerate')), 2)
# raise ResolveError
self.assertRaises(InferenceError, MODULE.igetattr, 'YOAA')
    def test_wildcard_import_names(self):
m = abuilder.file_build(join(DATA, 'all.py'), 'all')
self.assertEqual(m.wildcard_import_names(), ['Aaa', '_bla', 'name'])
m = abuilder.file_build(join(DATA, 'notall.py'), 'notall')
res = sorted(m.wildcard_import_names())
self.assertEqual(res, ['Aaa', 'func', 'name', 'other'])
def test_module_getattr(self):
data = '''
appli = application
appli += 2
del appli
'''
astng = abuilder.string_build(data, __name__, __file__)
# test del statement not returned by getattr
self.assertEqual(len(astng.getattr('appli')), 2,
astng.getattr('appli'))
def test_relative_to_absolute_name(self):
# package
mod = nodes.Module('very.multi.package', 'doc')
mod.package = True
modname = mod.relative_to_absolute_name('utils', 1)
self.assertEqual(modname, 'very.multi.package.utils')
modname = mod.relative_to_absolute_name('utils', 2)
self.assertEqual(modname, 'very.multi.utils')
modname = mod.relative_to_absolute_name('utils', 0)
self.assertEqual(modname, 'very.multi.package.utils')
modname = mod.relative_to_absolute_name('', 1)
self.assertEqual(modname, 'very.multi.package')
# non package
mod = nodes.Module('very.multi.module', 'doc')
mod.package = False
modname = mod.relative_to_absolute_name('utils', 0)
self.assertEqual(modname, 'very.multi.utils')
modname = mod.relative_to_absolute_name('utils', 1)
self.assertEqual(modname, 'very.multi.utils')
modname = mod.relative_to_absolute_name('utils', 2)
self.assertEqual(modname, 'very.utils')
modname = mod.relative_to_absolute_name('', 1)
self.assertEqual(modname, 'very.multi')
def test_import_1(self):
data = '''from . import subpackage'''
astng = abuilder.string_build(data, 'package', join(REGRTEST_DATA, 'package', '__init__.py'))
sys.path.insert(1, REGRTEST_DATA)
try:
m = astng.import_module('', level=1)
self.assertEqual(m.name, 'package')
infered = list(astng.igetattr('subpackage'))
self.assertEqual(len(infered), 1)
self.assertEqual(infered[0].name, 'package.subpackage')
finally:
del sys.path[1]
def test_import_2(self):
data = '''from . import subpackage as pouet'''
astng = abuilder.string_build(data, 'package', join(dirname(abspath(__file__)), 'regrtest_data', 'package', '__init__.py'))
sys.path.insert(1, REGRTEST_DATA)
try:
m = astng.import_module('', level=1)
self.assertEqual(m.name, 'package')
infered = list(astng.igetattr('pouet'))
self.assertEqual(len(infered), 1)
self.assertEqual(infered[0].name, 'package.subpackage')
finally:
del sys.path[1]
class FunctionNodeTC(TestCase):
def test_special_attributes(self):
func = MODULE2['make_class']
self.assertEqual(len(func.getattr('__name__')), 1)
self.assertIsInstance(func.getattr('__name__')[0], nodes.Const)
self.assertEqual(func.getattr('__name__')[0].value, 'make_class')
self.assertEqual(len(func.getattr('__doc__')), 1)
self.assertIsInstance(func.getattr('__doc__')[0], nodes.Const)
self.assertEqual(func.getattr('__doc__')[0].value, 'check base is correctly resolved to Concrete0')
self.assertEqual(len(MODULE.getattr('__dict__')), 1)
self.assertIsInstance(MODULE.getattr('__dict__')[0], nodes.Dict)
def test_dict_interface(self):
_test_dict_interface(self, MODULE['global_access'], 'local')
def test_default_value(self):
func = MODULE2['make_class']
self.assertIsInstance(func.args.default_value('base'), nodes.Getattr)
self.assertRaises(scoped_nodes.NoDefault, func.args.default_value, 'args')
self.assertRaises(scoped_nodes.NoDefault, func.args.default_value, 'kwargs')
self.assertRaises(scoped_nodes.NoDefault, func.args.default_value, 'any')
#self.assertIsInstance(func.mularg_class('args'), nodes.Tuple)
#self.assertIsInstance(func.mularg_class('kwargs'), nodes.Dict)
#self.assertEqual(func.mularg_class('base'), None)
def test_navigation(self):
function = MODULE['global_access']
self.assertEqual(function.statement(), function)
l_sibling = function.previous_sibling()
# check taking parent if child is not a stmt
self.assertIsInstance(l_sibling, nodes.Assign)
child = function.args.args[0]
self.assert_(l_sibling is child.previous_sibling())
r_sibling = function.next_sibling()
self.assertIsInstance(r_sibling, nodes.Class)
self.assertEqual(r_sibling.name, 'YO')
self.assert_(r_sibling is child.next_sibling())
last = r_sibling.next_sibling().next_sibling().next_sibling()
self.assertIsInstance(last, nodes.Assign)
self.assertEqual(last.next_sibling(), None)
first = l_sibling.previous_sibling().previous_sibling().previous_sibling().previous_sibling().previous_sibling()
self.assertEqual(first.previous_sibling(), None)
def test_nested_args(self):
if sys.version_info >= (3, 0):
self.skipTest("nested args has been removed in py3.x")
code = '''
def nested_args(a, (b, c, d)):
"nested arguments test"
'''
tree = abuilder.string_build(code)
func = tree['nested_args']
self.assertEqual(sorted(func.locals), ['a', 'b', 'c', 'd'])
self.assertEqual(func.args.format_args(), 'a, (b, c, d)')
def test_four_args(self):
func = MODULE['four_args']
#self.assertEqual(func.args.args, ['a', ('b', 'c', 'd')])
local = sorted(func.keys())
self.assertEqual(local, ['a', 'b', 'c', 'd'])
self.assertEqual(func.type, 'function')
def test_format_args(self):
func = MODULE2['make_class']
self.assertEqual(func.args.format_args(), 'any, base=data.module.YO, *args, **kwargs')
func = MODULE['four_args']
self.assertEqual(func.args.format_args(), 'a, b, c, d')
def test_is_abstract(self):
method = MODULE2['AbstractClass']['to_override']
self.assert_(method.is_abstract(pass_is_abstract=False))
self.failUnlessEqual(method.qname(), 'data.module2.AbstractClass.to_override')
self.failUnlessEqual(method.pytype(), '%s.instancemethod' % BUILTINS_MODULE)
method = MODULE2['AbstractClass']['return_something']
self.assert_(not method.is_abstract(pass_is_abstract=False))
# non regression : test raise "string" doesn't cause an exception in is_abstract
func = MODULE2['raise_string']
self.assert_(not func.is_abstract(pass_is_abstract=False))
## def test_raises(self):
## method = MODULE2['AbstractClass']['to_override']
## self.assertEqual([str(term) for term in method.raises()],
## ["CallFunc(Name('NotImplementedError'), [], None, None)"] )
## def test_returns(self):
## method = MODULE2['AbstractClass']['return_something']
## # use string comp since Node doesn't handle __cmp__
## self.assertEqual([str(term) for term in method.returns()],
## ["Const('toto')", "Const(None)"])
def test_lambda_pytype(self):
data = '''
def f():
g = lambda: None
'''
astng = abuilder.string_build(data, __name__, __file__)
g = list(astng['f'].ilookup('g'))[0]
self.failUnlessEqual(g.pytype(), '%s.function' % BUILTINS_MODULE)
def test_lambda_qname(self):
astng = abuilder.string_build('''
lmbd = lambda: None
''', __name__, __file__)
self.assertEqual('%s.<lambda>' % __name__, astng['lmbd'].parent.value.qname())
def test_is_method(self):
data = '''
class A:
def meth1(self):
return 1
@classmethod
def meth2(cls):
return 2
@staticmethod
def meth3():
return 3
def function():
return 0
@staticmethod
def sfunction():
return -1
'''
astng = abuilder.string_build(data, __name__, __file__)
self.failUnless(astng['A']['meth1'].is_method())
self.failUnless(astng['A']['meth2'].is_method())
self.failUnless(astng['A']['meth3'].is_method())
self.failIf(astng['function'].is_method())
self.failIf(astng['sfunction'].is_method())
def test_argnames(self):
if sys.version_info < (3, 0):
code = 'def f(a, (b, c), *args, **kwargs): pass'
else:
code = 'def f(a, b, c, *args, **kwargs): pass'
astng = abuilder.string_build(code, __name__, __file__)
self.assertEqual(astng['f'].argnames(), ['a', 'b', 'c', 'args', 'kwargs'])
def test_return_nothing(self):
"""test infered value on a function with empty return"""
data = '''
def func():
return
a = func()
'''
astng = abuilder.string_build(data, __name__, __file__)
call = astng.body[1].value
func_vals = call.infered()
self.assertEqual(len(func_vals), 1)
self.assertIsInstance(func_vals[0], nodes.Const)
self.assertEqual(func_vals[0].value, None)
def test_func_instance_attr(self):
"""test instance attributes for functions"""
data= """
def test():
print(test.bar)
test.bar = 1
test()
"""
astng = abuilder.string_build(data, 'mod', __file__)
func = astng.body[2].value.func.infered()[0]
self.assertIsInstance(func, nodes.Function)
self.assertEqual(func.name, 'test')
one = func.getattr('bar')[0].infered()[0]
self.assertIsInstance(one, nodes.Const)
self.assertEqual(one.value, 1)
class ClassNodeTC(TestCase):
def test_dict_interface(self):
_test_dict_interface(self, MODULE['YOUPI'], 'method')
def test_cls_special_attributes_1(self):
cls = MODULE['YO']
self.assertEqual(len(cls.getattr('__bases__')), 1)
self.assertEqual(len(cls.getattr('__name__')), 1)
self.assertIsInstance(cls.getattr('__name__')[0], nodes.Const)
self.assertEqual(cls.getattr('__name__')[0].value, 'YO')
self.assertEqual(len(cls.getattr('__doc__')), 1)
self.assertIsInstance(cls.getattr('__doc__')[0], nodes.Const)
self.assertEqual(cls.getattr('__doc__')[0].value, 'hehe')
self.assertEqual(len(cls.getattr('__module__')), 1)
self.assertIsInstance(cls.getattr('__module__')[0], nodes.Const)
self.assertEqual(cls.getattr('__module__')[0].value, 'data.module')
self.assertEqual(len(cls.getattr('__dict__')), 1)
self.assertRaises(NotFoundError, cls.getattr, '__mro__')
for cls in (nodes.List._proxied, nodes.Const(1)._proxied):
self.assertEqual(len(cls.getattr('__bases__')), 1)
self.assertEqual(len(cls.getattr('__name__')), 1)
self.assertEqual(len(cls.getattr('__doc__')), 1, (cls, cls.getattr('__doc__')))
self.assertEqual(cls.getattr('__doc__')[0].value, cls.doc)
self.assertEqual(len(cls.getattr('__module__')), 1)
self.assertEqual(len(cls.getattr('__dict__')), 1)
self.assertEqual(len(cls.getattr('__mro__')), 1)
def test_cls_special_attributes_2(self):
astng = abuilder.string_build('''
class A: pass
class B: pass
A.__bases__ += (B,)
''', __name__, __file__)
self.assertEqual(len(astng['A'].getattr('__bases__')), 2)
self.assertIsInstance(astng['A'].getattr('__bases__')[0], nodes.Tuple)
self.assertIsInstance(astng['A'].getattr('__bases__')[1], nodes.AssAttr)
def test_instance_special_attributes(self):
for inst in (Instance(MODULE['YO']), nodes.List(), nodes.Const(1)):
self.assertRaises(NotFoundError, inst.getattr, '__mro__')
self.assertRaises(NotFoundError, inst.getattr, '__bases__')
self.assertRaises(NotFoundError, inst.getattr, '__name__')
self.assertEqual(len(inst.getattr('__dict__')), 1)
self.assertEqual(len(inst.getattr('__doc__')), 1)
def test_navigation(self):
klass = MODULE['YO']
self.assertEqual(klass.statement(), klass)
l_sibling = klass.previous_sibling()
self.assert_(isinstance(l_sibling, nodes.Function), l_sibling)
self.assertEqual(l_sibling.name, 'global_access')
r_sibling = klass.next_sibling()
self.assertIsInstance(r_sibling, nodes.Class)
self.assertEqual(r_sibling.name, 'YOUPI')
def test_local_attr_ancestors(self):
klass2 = MODULE['YOUPI']
it = klass2.local_attr_ancestors('__init__')
anc_klass = it.next()
self.assertIsInstance(anc_klass, nodes.Class)
self.assertEqual(anc_klass.name, 'YO')
self.assertRaises(StopIteration, it.next)
it = klass2.local_attr_ancestors('method')
self.assertRaises(StopIteration, it.next)
def test_instance_attr_ancestors(self):
klass2 = MODULE['YOUPI']
it = klass2.instance_attr_ancestors('yo')
anc_klass = it.next()
self.assertIsInstance(anc_klass, nodes.Class)
self.assertEqual(anc_klass.name, 'YO')
self.assertRaises(StopIteration, it.next)
klass2 = MODULE['YOUPI']
it = klass2.instance_attr_ancestors('member')
self.assertRaises(StopIteration, it.next)
def test_methods(self):
klass2 = MODULE['YOUPI']
methods = sorted([m.name for m in klass2.methods()])
self.assertEqual(methods, ['__init__', 'class_method',
'method', 'static_method'])
methods = [m.name for m in klass2.mymethods()]
methods.sort()
self.assertEqual(methods, ['__init__', 'class_method',
'method', 'static_method'])
klass2 = MODULE2['Specialization']
methods = [m.name for m in klass2.mymethods()]
methods.sort()
self.assertEqual(methods, [])
method_locals = klass2.local_attr('method')
self.assertEqual(len(method_locals), 1)
self.assertEqual(method_locals[0].name, 'method')
self.assertRaises(NotFoundError, klass2.local_attr, 'nonexistant')
methods = [m.name for m in klass2.methods()]
methods.sort()
self.assertEqual(methods, ['__init__', 'class_method',
'method', 'static_method'])
#def test_rhs(self):
# my_dict = MODULE['MY_DICT']
# self.assertIsInstance(my_dict.rhs(), nodes.Dict)
# a = MODULE['YO']['a']
# value = a.rhs()
# self.assertIsInstance(value, nodes.Const)
# self.assertEqual(value.value, 1)
def test_ancestors(self):
klass = MODULE['YOUPI']
ancs = [a.name for a in klass.ancestors()]
self.assertEqual(ancs, ['YO'])
klass = MODULE2['Specialization']
ancs = [a.name for a in klass.ancestors()]
self.assertEqual(ancs, ['YOUPI', 'YO'])
def test_type(self):
klass = MODULE['YOUPI']
self.assertEqual(klass.type, 'class')
klass = MODULE2['Metaclass']
self.assertEqual(klass.type, 'metaclass')
klass = MODULE2['MyException']
self.assertEqual(klass.type, 'exception')
klass = MODULE2['MyIFace']
self.assertEqual(klass.type, 'interface')
klass = MODULE2['MyError']
self.assertEqual(klass.type, 'exception')
def test_interfaces(self):
for klass, interfaces in (('Concrete0', ['MyIFace']),
('Concrete1', ['MyIFace', 'AnotherIFace']),
('Concrete2', ['MyIFace', 'AnotherIFace']),
('Concrete23', ['MyIFace', 'AnotherIFace'])):
klass = MODULE2[klass]
self.assertEqual([i.name for i in klass.interfaces()],
interfaces)
def test_concat_interfaces(self):
astng = abuilder.string_build('''
class IMachin: pass
class Correct2:
"""docstring"""
__implements__ = (IMachin,)
class BadArgument:
"""docstring"""
__implements__ = (IMachin,)
class InterfaceCanNowBeFound:
"""docstring"""
__implements__ = BadArgument.__implements__ + Correct2.__implements__
''')
self.assertEqual([i.name for i in astng['InterfaceCanNowBeFound'].interfaces()],
['IMachin'])
def test_inner_classes(self):
eee = NONREGR['Ccc']['Eee']
self.assertEqual([n.name for n in eee.ancestors()], ['Ddd', 'Aaa', 'object'])
def test_classmethod_attributes(self):
data = '''
class WebAppObject(object):
def registered(cls, application):
cls.appli = application
cls.schema = application.schema
cls.config = application.config
return cls
registered = classmethod(registered)
'''
astng = abuilder.string_build(data, __name__, __file__)
cls = astng['WebAppObject']
self.assertEqual(sorted(cls.locals.keys()),
['appli', 'config', 'registered', 'schema'])
def test_class_getattr(self):
data = '''
class WebAppObject(object):
appli = application
appli += 2
del self.appli
'''
astng = abuilder.string_build(data, __name__, __file__)
cls = astng['WebAppObject']
# test del statement not returned by getattr
self.assertEqual(len(cls.getattr('appli')), 2)
def test_instance_getattr(self):
data = '''
class WebAppObject(object):
def __init__(self, application):
self.appli = application
self.appli += 2
del self.appli
'''
astng = abuilder.string_build(data, __name__, __file__)
inst = Instance(astng['WebAppObject'])
# test del statement not returned by getattr
self.assertEqual(len(inst.getattr('appli')), 2)
def test_instance_getattr_with_class_attr(self):
data = '''
class Parent:
aa = 1
cc = 1
class Klass(Parent):
aa = 0
bb = 0
def incr(self, val):
self.cc = self.aa
if val > self.aa:
val = self.aa
if val < self.bb:
val = self.bb
self.aa += val
'''
astng = abuilder.string_build(data, __name__, __file__)
inst = Instance(astng['Klass'])
self.assertEqual(len(inst.getattr('aa')), 3, inst.getattr('aa'))
self.assertEqual(len(inst.getattr('bb')), 1, inst.getattr('bb'))
self.assertEqual(len(inst.getattr('cc')), 2, inst.getattr('cc'))
def test_getattr_method_transform(self):
data = '''
class Clazz(object):
def m1(self, value):
self.value = value
m2 = m1
def func(arg1, arg2):
"function that will be used as a method"
return arg1.value + arg2
Clazz.m3 = func
inst = Clazz()
inst.m4 = func
'''
astng = abuilder.string_build(data, __name__, __file__)
cls = astng['Clazz']
# test del statement not returned by getattr
for method in ('m1', 'm2', 'm3'):
inferred = list(cls.igetattr(method))
self.assertEqual(len(inferred), 1)
self.assertIsInstance(inferred[0], UnboundMethod)
inferred = list(Instance(cls).igetattr(method))
self.assertEqual(len(inferred), 1)
self.assertIsInstance(inferred[0], BoundMethod)
inferred = list(Instance(cls).igetattr('m4'))
self.assertEqual(len(inferred), 1)
self.assertIsInstance(inferred[0], nodes.Function)
def test_getattr_from_grandpa(self):
data = '''
class Future:
attr = 1
class Present(Future):
pass
class Past(Present):
pass
'''
astng = abuilder.string_build(data)
past = astng['Past']
attr = past.getattr('attr')
self.assertEqual(len(attr), 1)
attr1 = attr[0]
self.assertIsInstance(attr1, nodes.AssName)
self.assertEqual(attr1.name, 'attr')
def test_function_with_decorator_lineno(self):
data = '''
@f(a=2,
b=3)
def g1(x):
print x
@f(a=2,
b=3)
def g2():
pass
'''
astng = abuilder.string_build(data)
self.assertEqual(astng['g1'].fromlineno, 4)
self.assertEqual(astng['g1'].tolineno, 5)
self.assertEqual(astng['g2'].fromlineno, 9)
self.assertEqual(astng['g2'].tolineno, 10)
__all__ = ('ModuleNodeTC', 'ImportNodeTC', 'FunctionNodeTC', 'ClassNodeTC')
if __name__ == '__main__':
unittest_main()
| tkaitchuck/nupic | external/common/lib/python2.6/site-packages/logilab/astng/test/unittest_scoped_nodes.py | Python | gpl-3.0 | 24,944 |
import numpy as np
import matplotlib
matplotlib.use("Qt5Agg")
from matplotlib.pyplot import cm
from spimagine import volshow
import OpenGL.GL as GL
if __name__ == '__main__':
data = np.einsum("ij,k",np.ones((100,)*2), np.linspace(0,1,100))
w = volshow(data)
w.glWidget._set_colormap_array(cm.hot(np.linspace(0,1,2**12))[:,:3])
print("maximal texture size: ", GL.glGetIntegerv(GL.GL_MAX_TEXTURE_SIZE)) | maweigert/spimagine | tests/test_gui/test_bitdepth.py | Python | bsd-3-clause | 421 |
import numpy
import energyfunc
import pdb
from scipy import weave
#==========================================
# FORCE CALCULATION METHODS
#==========================================
def cdihedforces(torsparam, bonds, dsq, d, numbeads):
forces = numpy.zeros((numbeads,3))
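    # weave.inline exposes each passed NumPy array to the C code through
    # uppercase macros named after the variable plus its dimensionality,
    # e.g. BONDS2(i,j) indexes the 2-D array `bonds` and D1(i) the 1-D array `d`.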
code = """
double x1, x2, x3, y1, y2, y3, z1, z2, z3;
double mx, my, mz, m2, nx, ny, nz, n2;
double magBC, a, b, dihed, dV;
double Fix, Fiy, Fiz, Fjx, Fjy, Fjz, Fkx, Fky, Fkz, Flx, Fly, Flz;
for (int i = 0; i < numbeads-3; i++){
x1 = BONDS2(i,0); y1 = BONDS2(i,1); z1 = BONDS2(i,2);
x2 = -BONDS2(i+1,0); y2 = -BONDS2(i+1,1); z2 = -BONDS2(i+1,2);
x3 = BONDS2(i+2,0); y3 = BONDS2(i+2,1); z3 = BONDS2(i+2,2);
mx = y1*z2 - z1*y2; my = z1*x2 - x1*z2; mz = x1*y2 - y1*x2;
nx = y2*z3 - z2*y3; ny = z2*x3 - x2*z3; nz = x2*y3 - y2*x3;
m2 = mx*mx + my*my + mz*mz;
n2 = nx*nx + ny*ny + nz*nz;
magBC = sqrt(x2*x2 + y2*y2 + z2*z2);
a = magBC*(x1*nx + y1*ny + z1*nz);
b = mx*nx + my*ny + mz*nz;
dihed = atan2(a,b);
if ( dihed < 0){
dihed += 2*M_PI;
}
dV = -TORSPARAM2(4*i,0)*TORSPARAM2(4*i,1)*sin(TORSPARAM2(4*i,1)*dihed-TORSPARAM2(4*i,2)) - TORSPARAM2(4*i+1,0)*TORSPARAM2(4*i+1,1)*sin(TORSPARAM2(4*i+1,1)*dihed-TORSPARAM2(4*i+1,2)) - TORSPARAM2(4*i+2,0)*TORSPARAM2(4*i+2,1)*sin(TORSPARAM2(4*i+2,1)*dihed-TORSPARAM2(4*i+2,2)) - TORSPARAM2(4*i+3,0)*TORSPARAM2(4*i+3,1)*sin(TORSPARAM2(4*i+3,1)*dihed-TORSPARAM2(4*i+3,2));
Fix = -dV * D1(i+1) * mx / m2; Fiy = -dV * D1(i+1) * my / m2; Fiz = -dV * D1(i+1) * mz / m2;
Flx = dV * D1(i+1) * nx / n2; Fly = dV * D1(i+1) * ny / n2; Flz = dV * D1(i+1) * nz / n2;
a = (x1*x2 + y1*y2 + z1*z2) / DSQ1(i+1);
b = (x2*x3 + y2*y3 + z2*z3) / DSQ1(i+1);
Fjx = (a-1)*Fix - b*Flx; Fjy = (a-1)*Fiy - b*Fly; Fjz = (a-1)*Fiz - b*Flz;
Fkx = -a*Fix + (b-1)*Flx; Fky = -a*Fiy + (b-1)*Fly; Fkz = -a*Fiz + (b-1)*Flz;
FORCES2(i,0) += Fix; FORCES2(i,1) += Fiy; FORCES2(i,2) += Fiz;
FORCES2(i+1,0) += Fjx; FORCES2(i+1,1) += Fjy; FORCES2(i+1,2) += Fjz;
FORCES2(i+2,0) += Fkx; FORCES2(i+2,1) += Fky; FORCES2(i+2,2) += Fkz;
FORCES2(i+3,0) += Flx; FORCES2(i+3,1) += Fly; FORCES2(i+3,2) += Flz;
}
"""
info = weave.inline(code, ['forces', 'torsparam', 'numbeads', 'bonds', 'dsq', 'd'], headers=['<math.h>', '<stdlib.h>'])
return forces*4.184
def cangleforces(mpos, angleparam, bonds, d, numbeads):
rki = mpos[0:-2,:] - mpos[2:len(mpos),:]
forces = numpy.zeros((numbeads,3))
code = """
double xba, yba, zba, xbc, ybc, zbc;
double angle, dV;
double fpx, fpy, fpz, ba2, xy, xz, yz;
double Fix, Fiy, Fiz, Fkx, Fky, Fkz;
for ( int i = 0; i < numbeads - 2; i++){
xba = BONDS2(i,0); yba = BONDS2(i,1); zba = BONDS2(i,2);
xbc = -BONDS2(i+1,0); ybc = -BONDS2(i+1,1); zbc = -BONDS2(i+1,2);
angle = acos((xba*xbc + yba*ybc + zba*zbc) / (D1(i)*D1(i+1)));
//dV = 2 * ANGLEPARAM2(i,0) * (cos(angle) - cos(ANGLEPARAM2(i,1))) / (sin(ANGLEPARAM2(i,1))*sin(ANGLEPARAM2(i,1)));
dV = 2*ANGLEPARAM2(i,0)*(angle-ANGLEPARAM2(i,1))/-sin(angle);
fpx = dV / (D1(i)*D1(i+1)) * RKI2(i,0); fpy = dV / (D1(i)*D1(i+1)) * RKI2(i,1); fpz = dV / (D1(i)*D1(i+1)) * RKI2(i,2);
ba2 = xba*xba + yba*yba + zba*zba;
xy = xba*yba/ba2; xz = xba*zba/ba2; yz = yba*zba/ba2;
Fix = fpx*(1 - xba*xba/ba2) - fpy*xy - fpz*xz;
Fiy = -fpx*xy + fpy*(1 - yba*yba/ba2) - fpz*yz;
Fiz = -fpx*xz - fpy*yz + fpz*(1 - zba*zba/ba2);
ba2 = xbc*xbc + ybc*ybc + zbc*zbc;
xy = xbc*ybc/ba2; xz = xbc*zbc/ba2; yz = ybc*zbc/ba2;
Fkx = -fpx*(1 - xbc*xbc/ba2) + fpy*xy + fpz*xz;
Fky = fpx*xy - fpy*(1 - ybc*ybc/ba2) + fpz*yz;
Fkz = fpx*xz + fpy*yz - fpz*(1 - zbc*zbc/ba2);
FORCES2(i,0) += Fix; FORCES2(i,1) += Fiy; FORCES2(i,2) += Fiz;
FORCES2(i+1,0) += -Fix-Fkx; FORCES2(i+1,1) += -Fiy-Fky; FORCES2(i+1,2) += -Fiz-Fkz;
FORCES2(i+2,0) += Fkx; FORCES2(i+2,1) += Fky; FORCES2(i+2,2) += Fkz;
}
"""
info = weave.inline(code, ['forces', 'angleparam', 'numbeads', 'bonds', 'rki', 'd'], headers=['<math.h>', '<stdlib.h>'])
return forces*4.184
def bondedforces(mpos, torsparam, angleparam, bonds, d2, d, numbeads):
"""Returns the bonded forces (angle + torsion) from a given configuration"""
forces = numpy.zeros((numbeads,3))
arccos = numpy.arccos
sin = numpy.sin
cos = numpy.cos
array = numpy.array
# dihedral force calculation
for i in range(numbeads-3):
# find dihedral angle
rij = bonds[i]
rkj = -bonds[i+1]
rkl = bonds[i+2]
m = array([rij[1]*rkj[2]-rij[2]*rkj[1], rij[2]*rkj[0]-rij[0]*rkj[2], rij[0]*rkj[1]-rij[1]*rkj[0]])
n = array([rkj[1]*rkl[2]-rkj[2]*rkl[1], rkj[2]*rkl[0]-rkj[0]*rkl[2], rkj[0]*rkl[1]-rkj[1]*rkl[0]])
m2 = m[0]*m[0] + m[1]*m[1] + m[2]*m[2]
n2 = n[0]*n[0] + n[1]*n[1] + n[2]*n[2]
dihed = arccos((m[0]*n[0]+m[1]*n[1]+m[2]*n[2]) / (m2*n2)**.5)
if ((m[0]*rkl[0]+m[1]*rkl[1]+m[2]*rkl[2]) < 0):
dihed = -abs(dihed) + 2 * numpy.pi
else:
dihed = abs(dihed)
# calculate gradient of dihedral potential and resulting forces on i j k l
dV = -torsparam[4*i:4*i+4,0] * torsparam[4*i:4*i+4,1] * sin(torsparam[4*i:4*i+4,1] * dihed - torsparam[4*i:4*i+4,2])
dV = dV[0]+dV[1]+dV[2]+dV[3]
Fi = -dV * d[i+1]
Fl = -Fi * n / n2
Fi = Fi * m / m2
Fj = (rij[0]*rkj[0]+rij[1]*rkj[1]+rij[2]*rkj[2]) / d2[i+1] * Fi - (rkl[0]*rkj[0]+rkl[1]*rkj[1]+rkl[2]*rkj[2]) / d2[i+1] * Fl
Fk = -Fj - Fl
Fj = -Fi + Fj
# add forces to total force
forces[i,:] += Fi
forces[i+1,:] += Fj
forces[i+2,:] += Fk
forces[i+3,:] += Fl
#angle force calculation
# find angle
rki = mpos[i,:] - mpos[i+2,:]
rji = bonds[i]
rjk = -bonds[i+1]
dotBABC = rji[0]*rjk[0] + rji[1]*rjk[1] + rji[2]*rjk[2]
dotBA = rji[0]*rji[0] + rji[1]*rji[1] + rji[2]*rji[2]
dotBC = rjk[0]*rjk[0] + rjk[1]*rjk[1] + rjk[2]*rjk[2]
angle = numpy.arccos((rji[0]*rjk[0] + rji[1]*rjk[1] + rji[2]*rjk[2]) / (d[i]*d[i+1])) #in radians
# calculate gradient of angle potential and resulting forces on i j k
dV = 2 * angleparam[i,0] * (cos(angle) - cos(angleparam[i,1])) / sin(angleparam[i,1])**2
fprime = dV / (d[i]*d[i+1]) * rki
Fi = array(projdot(rji,fprime))
Fk = -array(projdot(rjk,fprime))
Fj = -Fi - Fk
# add forces to total force
forces[i,:] += Fi
forces[i+1,:] += Fj
forces[i+2,:] += Fk
# angle force calculations for three end beads not included in loop
i = numbeads - 3
rki = mpos[i,:] - mpos[i+2,:]
rji = bonds[i]
rjk = -bonds[i+1]
dotBABC = rji[0]*rjk[0] + rji[1]*rjk[1] + rji[2]*rjk[2]
dotBA = rji[0]*rji[0] + rji[1]*rji[1] + rji[2]*rji[2]
dotBC = rjk[0]*rjk[0] + rjk[1]*rjk[1] + rjk[2]*rjk[2]
angle = arccos((rji[0]*rjk[0] + rji[1]*rjk[1] + rji[2]*rjk[2]) / (d[i]*d[i+1])) #in radians
dV = 2 * angleparam[i,0] * (cos(angle) - cos(angleparam[i,1])) / sin(angleparam[i,1])**2
fprime = dV / (d[i]*d[i+1]) * rki
Fi = array(projdot(rji,fprime))
Fk = -array(projdot(rjk,fprime))
Fj = -Fi - Fk
# add forces to total force
forces[i,:] += Fi
forces[i+1,:] += Fj
forces[i+2,:] += Fk
return forces *4.184 # convert forces to kJ/mol/K
def projdot(p,d):
p2 = p[0]*p[0] + p[1]*p[1] + p[2]*p[2]
x2 = 1 - p[0]**2/p2
y2 = 1 - p[1]**2/p2
z2 = 1 - p[2]**2/p2
xy = p[0] * p[1] / p2
xz = p[0] * p[2] / p2
yz = p[1] * p[2] / p2
xcomp = d[0]*x2 - d[1]*xy - d[2]*xz
ycomp = -d[0]*xy + d[1]*y2 - d[2]*yz
zcomp = -d[0]*xz - d[1]*yz + d[2]*z2
return [xcomp,ycomp,zcomp]
def nonbondedforces(mpos, numint, numbeads, natparam, nonnatparam, nnepsil):
"""Returns the nonbonded forces of a given configuration"""
forces = numpy.zeros((numbeads,3))
# get distances, square distances, and magnitude distances for all interactions
    rvec = energyfunc.cgetforcer(mpos, numint, numbeads) # excludes 1-2 and 1-3 neighbors
r2 = numpy.sum(rvec**2, axis=1)
rr = r2**.5
# calculates potential energy gradient for native and nonnative interactions
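    # (the native term is consistent with a 12-10-6 Go-type pair potential,
    #  V(r) = eps*(13*(sig/r)**12 - 18*(sig/r)**10 + 4*(sig/r)**6); the
    #  coefficients -156, 180 and -24 below are its radial derivative)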
ndV = natparam[:,0] * natparam[:,2] * natparam[:,2] / r2
ndV6 = ndV * ndV * ndV
ndV = natparam[:,1] * (-156*ndV6*ndV6/rr + 180*ndV6*ndV*ndV/rr - 24*ndV6/rr)
nndV = nonnatparam[:,0] * nonnatparam[:,1] * nonnatparam[:,1] / r2
nndV = nndV * nndV * nndV
nndV = -12 * nnepsil * nndV * nndV / rr
# add forces to total force
k = 0
F = -(ndV+nndV) / rr
F = numpy.transpose(F) * numpy.transpose(rvec) # now 3xN instead of Nx3
F = F.transpose()
#for i in range(numbeads):
#for j in range(i+3,numbeads):
#forces[i,:] += F[:,k]
#forces[j,:] += -F[:,k]
#k += 1
code = """
int k = 0;
for ( int i = 0; i < numbeads; i++){
for ( int j = i+3; j < numbeads; j++){
FORCES2(i,0) += F2(k,0);
FORCES2(i,1) += F2(k,1);
FORCES2(i,2) += F2(k,2);
FORCES2(j,0) += -F2(k,0);
FORCES2(j,1) += -F2(k,1);
FORCES2(j,2) += -F2(k,2);
k++;
}
}
"""
info = weave.inline(code, ['forces', 'F', 'numbeads'], headers=['<math.h>', '<stdlib.h>'])
return forces * 4.184 # converts force to kJ/mol/K
def cnonbondedforces(mpos, numint, numbeads, natparam, nonnatparam, nnepsil):
"""Returns the nonbonded forces of a given configuration"""
forces = numpy.zeros((numbeads,3))
# get distances, square distances, and magnitude distances for all interactions
    #rvec = energyfunc.cgetforcer(mpos, numint, numbeads) # excludes 1-2 and 1-3 neighbors
code = """
int k = 0;
double r2, r, ndV, ndV6, F,x,y,z;
for ( int i = 0; i < numbeads; i++){
for ( int j = i+3; j < numbeads; j++){
x = MPOS2(i,0) - MPOS2(j,0);
y = MPOS2(i,1) - MPOS2(j,1);
z = MPOS2(i,2) - MPOS2(j,2);
r2 = x*x + y*y + z*z;
r = sqrt(r2);
if (NATPARAM2(k,0) == 1){
ndV = NATPARAM2(k,2)*NATPARAM2(k,2)/r2;
ndV6 = ndV*ndV*ndV;
ndV = NATPARAM2(k,1)*(-156*ndV6*ndV6/r + 180*ndV6*ndV*ndV/r - 24*ndV6/r);
}
else{
ndV = NONNATPARAM2(k,1)*NONNATPARAM2(k,1)/r2;
ndV = ndV*ndV*ndV;
ndV = -12*nnepsil*ndV*ndV/r;
}
F = -ndV/r;
FORCES2(i,0) += F*x;
FORCES2(i,1) += F*y;
FORCES2(i,2) += F*z;
FORCES2(j,0) += -F*x;
FORCES2(j,1) += -F*y;
FORCES2(j,2) += -F*z;
k++;
}
}
"""
info = weave.inline(code, ['forces', 'mpos', 'numbeads', 'natparam', 'nonnatparam', 'nnepsil'], headers=['<math.h>', '<stdlib.h>'])
return forces * 4.184 # converts force to kJ/mol/K
def getsurfforce(prot_coord, surf_coord, numint, numbeads, param):
ep = param[0]
sig = param[1]
scale = param[2]
rvec = numpy.zeros((numint,3))
for i in range(len(surf_coord)):
rvec[i*numbeads:i*numbeads+numbeads] = surf_coord[i,:] - prot_coord
r2 = numpy.sum(rvec**2,axis = 1)
r = r2**.5
ndV = sig*sig/r2
ndV6 = ndV*ndV*ndV
ndV12 = ndV6*ndV6
ndV = -12*ep*ndV12/r+scale[:,0]*ep*(-144*ndV12/r+180*ndV6*ndV*ndV/r - 24*ndV6/r)
F = -ndV/r
F = numpy.transpose(F)*numpy.transpose(rvec)
F = F.transpose()
forces = numpy.zeros((numbeads,3))
for i in range(len(surf_coord)):
forces += -F[i*numbeads:i*numbeads+numbeads,:]
return forces*4.184
def cgetsurfforce(prot_coord, surf_coord, numint, numbeads, param, _):
forces = numpy.zeros((numbeads,3))
ep_in = param[0]
sig_in = param[1]
scale = param[2][:,0]
code = """
double ep = ep_in;
double sig = sig_in;
double x, y, z, r2, r, dV, dV6, F;
for ( int i = 0; i < numint; i++){
x = SURF_COORD2(i/numbeads,0) - PROT_COORD2(i % numbeads, 0);
y = SURF_COORD2(i/numbeads,1) - PROT_COORD2(i % numbeads, 1);
z = 0 - PROT_COORD2(i % numbeads, 2);
r2 = x*x + y*y + z*z;
if(r2<400){
r = sqrt(r2);
dV = sig*sig/r2;
dV6 = dV*dV*dV;
dV = SCALE1(i)*ep*(-(12/SCALE1(i)+144)*dV6*dV6/r+180*dV6*dV*dV/r-24*dV6/r);
F = -dV/r;
x = F*x; y = F*y; z = F*z;
FORCES2(i % numbeads, 0) -= x;
FORCES2(i % numbeads, 1) -= y;
FORCES2(i % numbeads, 2) -= z;
}
}
"""
info = weave.inline(code, ['forces', 'prot_coord', 'surf_coord', 'numbeads', 'numint', 'ep_in', 'sig_in', 'scale'], headers=['<math.h>', '<stdlib.h>'])
return forces*4.184
def getsurfforce_old(prot_coord, surf_coord, numint, numbeads, param):
rvec = numpy.zeros((numint,3))
for i in range(len(surf_coord)):
rvec[i*numbeads:i*numbeads+numbeads] = surf_coord[i,:] - prot_coord
r2 = numpy.sum(rvec**2,axis = 1)
r = r2**.5
ndV = param[:,1]*param[:,1]/r2
ndV6 = ndV*ndV*ndV
#ndV = param[:,0]*(-156*ndV6*ndV6/r+180*ndV6*ndV*ndV/r - 24*ndV6/r)
ndV = param[:,0]*(-12*ndV6*ndV6/r + 12*ndV6/r)
F = -ndV/r
F = numpy.transpose(F)*numpy.transpose(rvec)
F = F.transpose()
forces = numpy.zeros((numbeads,3))
for i in range(len(surf_coord)):
forces += -F[i*numbeads:i*numbeads+numbeads,:]
return forces*4.184
def cgetsurfforce_old(prot_coord, surf_coord, numint, numbeads, param, scale):
forces = numpy.zeros((numbeads,3))
code = """
double x, y, z, r2, r, dV, F;
for ( int i = 0; i < numint; i++){
x = SURF_COORD2(i/numbeads,0) - PROT_COORD2(i % numbeads, 0);
y = SURF_COORD2(i/numbeads,1) - PROT_COORD2(i % numbeads, 1);
z = 0 - PROT_COORD2(i % numbeads, 2);
r2 = x*x + y*y + z*z;
if(r2<400){
r = sqrt(r2);
dV = PARAM2(i,1)*PARAM2(i,1)/r2;
dV = dV*dV*dV;
dV = PARAM2(i,0)*(-12*dV*dV/r + 12*scale*dV/r);
F = -dV/r;
x = F*x; y = F*y; z = F*z;
FORCES2(i % numbeads, 0) -= x;
FORCES2(i % numbeads, 1) -= y;
FORCES2(i % numbeads, 2) -= z;
}
}
"""
info = weave.inline(code, ['forces', 'prot_coord', 'surf_coord', 'numbeads', 'numint', 'param','scale'], headers=['<math.h>', '<stdlib.h>'])
return forces*4.184
#==========================================
# BOND CONSTRAINT METHODS
#==========================================
def shake(bonds, v_half, h, m, d2, maxloop, numbeads, tol):
"""Performs SHAKE algorithm to constrain positions"""
#pdb.set_trace()
loops = 0
conv = numbeads -1
while conv != 0 and loops < maxloop:
conv = numbeads -1
for i in range(numbeads-1):
s = bonds[i] + h * (v_half[i,:]-v_half[i+1,:])
diff = s[0]*s[0] + s[1]*s[1] + s[2]*s[2] - d2[i]
if numpy.abs(diff) < tol:
conv -= 1
else:
g=diff/(2*h*(s[0]*bonds[i,0]+s[1]*bonds[i,1]+s[2]*bonds[i,2])*(1/m[i]+1/m[i+1]))
v_half[i,:]-=g/m[i]*bonds[i]
v_half[i+1,:]+=g/m[i+1]*bonds[i]
loops += 1
conv = True
if loops == maxloop:
conv = False
return v_half, conv
def rattle(bonds, vel, m, d2, maxloop, numbeads, tol):
"""Performs RATTLE algorithm to constrain velocities"""
conv = numbeads - 1 #numpy.ones(numbeads-1)
loops = 0
while conv != 0 and loops < maxloop:
conv = numbeads - 1
for i in range(numbeads-1):
vij = vel[i,:] - vel[i+1,:]
diff = bonds[i,0]*vij[0] + bonds[i,1]*vij[1] + bonds[i,2]*vij[2]
if numpy.abs(diff) < tol:
conv -= 1
else:
k = diff / (d2[i] * (1/m[i]+1/m[i+1]))
vel[i] -= k / m[i] * bonds[i,:]
vel[i+1] += k / m[i+1] * bonds[i,:]
loops += 1
conv = True
if loops == maxloop:
conv = False
return vel, conv
def cshake(bonds, v_half, h, m, dsq, maxloop, numbeads, tol):
"""Performs SHAKE algorithm to constrain positions"""
loops = numpy.array([0])
code="""
int conv = numbeads - 1;
double x, y, z, diff, g;
while ( conv != 0 && LOOPS1(0) < maxloop) {
conv = numbeads - 1;
for ( int i = 0; i < numbeads-1; i++){
x = BONDS2(i,0) + h * (V_HALF2(i,0)-V_HALF2(i+1,0));
y = BONDS2(i,1) + h * (V_HALF2(i,1)-V_HALF2(i+1,1));
z = BONDS2(i,2) + h * (V_HALF2(i,2)-V_HALF2(i+1,2));
diff = x * x + y * y + z * z - DSQ1(i);
if (fabs(diff) < tol){
conv -= 1;
}
else{
g = diff / (2.0*h*(x*BONDS2(i,0)+y*BONDS2(i,1)+z*BONDS2(i,2))*(1.0/M1(i)+1.0/M1(i+1)));
V_HALF2(i,0) -= g/M1(i)*BONDS2(i,0);
V_HALF2(i,1) -= g/M1(i)*BONDS2(i,1);
V_HALF2(i,2) -= g/M1(i)*BONDS2(i,2);
V_HALF2(i+1,0) += g/M1(i+1)*BONDS2(i,0);
V_HALF2(i+1,1) += g/M1(i+1)*BONDS2(i,1);
V_HALF2(i+1,2) += g/M1(i+1)*BONDS2(i,2);
}
}
LOOPS1(0) += 1;
}
"""
info = weave.inline(code, ['bonds', 'v_half', 'h', 'm', 'dsq', 'maxloop', 'numbeads', 'tol','loops'], headers=['<math.h>', '<stdlib.h>'])
#print "cshake iterations " + str(loops[0])
conv = True
if loops[0] == maxloop:
conv = False
return v_half, conv
def crattle(bonds, vel, m, dsq, maxloop, numbeads, tol):
"""Performs RATTLE algorithm to constrain velocities"""
loops = numpy.array([0])
code = """
int conv = numbeads - 1;
double diff, k, x, y, z;
while ( conv != 0 && LOOPS1(0) < maxloop){
conv = numbeads - 1;
for ( int i = 0; i < numbeads-1; i++){
x = VEL2(i,0) - VEL2(i+1,0);
y = VEL2(i,1) - VEL2(i+1,1);
z = VEL2(i,2) - VEL2(i+1,2);
diff = BONDS2(i,0)*x + BONDS2(i,1)*y + BONDS2(i,2)*z;
if (fabs(diff) < tol){
conv -= 1;
}
else{
k = diff / (DSQ1(i) * (1.0/M1(i)+1.0/M1(i+1)));
VEL2(i,0) -= k/M1(i)*BONDS2(i,0);
VEL2(i,1) -= k/M1(i)*BONDS2(i,1);
VEL2(i,2) -= k/M1(i)*BONDS2(i,2);
VEL2(i+1,0) += k/M1(i+1)*BONDS2(i,0);
VEL2(i+1,1) += k/M1(i+1)*BONDS2(i,1);
VEL2(i+1,2) += k/M1(i+1)*BONDS2(i,2);
}
}
LOOPS1(0)++;
}
"""
info = weave.inline(code, ['bonds', 'vel', 'm', 'dsq', 'maxloop', 'numbeads', 'tol', 'loops'], headers=['<math.h>', '<stdlib.h>'])
conv = True
if loops[0] == maxloop:
conv = False
return vel, conv
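# A minimal sketch of how these constraint routines are typically wired into a
# velocity-Verlet integrator (the variable names and tolerance here are
# illustrative, not part of this module):
#
#     v_half, ok = cshake(bonds, v_half, h, m, dsq, 1000, numbeads, 1e-8)
#     if not ok:
#         raise RuntimeError('SHAKE did not converge')
#     # ...advance positions, recompute bonds and forces, update velocities...
#     vel, ok = crattle(bonds, vel, m, dsq, 1000, numbeads, 1e-8)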
| shirtsgroup/pygo | package/HMCforce.py | Python | gpl-2.0 | 18,951 |
# Copyright (c) 2008, Aldo Cortesi. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from base import Layout
from .. import utils, manager
class _WinStack(object):
split = False
_current = 0
def _getCurrent(self):
return self._current
def _setCurrent(self, x):
if len(self):
self._current = abs(x % len(self))
else:
self._current = 0
current = property(_getCurrent, _setCurrent)
@property
def cw(self):
if not self.lst:
return None
return self.lst[self.current]
def __init__(self):
self.lst = []
def toggleSplit(self):
        self.split = not self.split
def join(self, ws):
# FIXME: This buggers up window order -
# windows should be injected BEFORE
# the current offset.
self.lst.extend(ws.lst)
def focus(self, w):
self.current = self.lst.index(w)
def focus_first(self):
if self:
if self.split:
return self[0]
else:
return self.cw
def focus_next(self, win):
if self.split:
idx = self.index(win)
if idx + 1 < len(self):
return self[idx + 1]
def focus_last(self):
if self:
if self.split:
return self[-1]
else:
return self.cw
def focus_prev(self, win):
if self.split:
idx = self.index(win)
if idx > 0:
return self[idx - 1]
def add(self, w):
self.lst.insert(self.current, w)
def remove(self, w):
idx = self.lst.index(w)
self.lst.remove(w)
if idx > self.current:
self.current -= 1
else:
# This apparently nonsensical assignment caps the value using the
# property definition.
self.current = self.current
def index(self, c):
return self.lst.index(c)
def __len__(self):
return len(self.lst)
def __getitem__(self, i):
return self.lst[i]
def __contains__(self, x):
return x in self.lst
def __repr__(self):
return "_WinStack(%s, %s)" % (
self.current, str([i.name for i in self]))
def info(self):
return dict(
windows=[i.name for i in self],
split=self.split,
current=self.current,
)
class Stack(Layout):
"""
The stack layout divides the screen horizontally into a set of stacks.
Commands allow you to switch between stacks, to next and previous
windows within a stack, and to split a stack to show all windows in the
stack, or unsplit it to show only the current window. At the moment,
this is the most mature and flexible layout in Qtile.
"""
defaults = [
("border_focus", "#0000ff", "Border colour for the focused window."),
("border_normal", "#000000", "Border colour for un-focused winows."),
("border_width", 1, "Border width."),
("name", "stack", "Name of this layout."),
]
def __init__(self, stacks=2, **config):
"""
- stacks: Number of stacks to start with.
"""
Layout.__init__(self, **config)
self.stacks = [_WinStack() for i in range(stacks)]
self.add_defaults(Stack.defaults)
@property
def currentStack(self):
return self.stacks[self.currentStackOffset]
@property
def currentStackOffset(self):
for i, s in enumerate(self.stacks):
if self.group.currentWindow in s:
return i
return 0
def clone(self, group):
c = Layout.clone(self, group)
# These are mutable
c.stacks = [_WinStack() for i in self.stacks]
return c
def _findNext(self, lst, offset):
for i in lst[offset + 1:]:
if i:
return i
else:
for i in lst[:offset]:
if i:
return i
def deleteCurrentStack(self):
if len(self.stacks) > 1:
off = self.currentStackOffset or 0
s = self.stacks[off]
self.stacks.remove(s)
off = min(off, len(self.stacks) - 1)
self.stacks[off].join(s)
if self.stacks[off]:
self.group.focus(
self.stacks[off].cw,
False
)
def nextStack(self):
n = self._findNext(
self.stacks,
self.currentStackOffset
)
if n:
self.group.focus(n.cw, True)
def previousStack(self):
n = self._findNext(
list(reversed(self.stacks)),
len(self.stacks) - self.currentStackOffset - 1
)
if n:
self.group.focus(n.cw, True)
def focus(self, c):
for i in self.stacks:
if c in i:
i.focus(c)
def focus_first(self):
for i in self.stacks:
if i:
return i.focus_first()
def focus_last(self):
for i in reversed(self.stacks):
if i:
return i.focus_last()
def focus_next(self, c):
iterator = iter(self.stacks)
for i in iterator:
if c in i:
next = i.focus_next(c)
if next:
return next
break
else:
return
for i in iterator:
if i:
return i.focus_first()
def focus_prev(self, c):
iterator = iter(reversed(self.stacks))
for i in iterator:
if c in i:
next = i.focus_prev(c)
if next:
return next
break
else:
return
for i in iterator:
if i:
return i.focus_last()
def add(self, c):
for i in self.stacks:
if not i:
i.add(c)
return
self.currentStack.add(c)
def remove(self, c):
currentOffset = self.currentStackOffset
for i in self.stacks:
if c in i:
i.remove(c)
break
if self.stacks[currentOffset].cw:
return self.stacks[currentOffset].cw
else:
n = self._findNext(
list(reversed(self.stacks)),
len(self.stacks) - currentOffset - 1
)
if n:
return n.cw
def configure(self, c, screen):
for i, s in enumerate(self.stacks):
if c in s:
break
else:
c.hide()
if c is self.group.currentWindow:
px = self.group.qtile.colorPixel(self.border_focus)
else:
px = self.group.qtile.colorPixel(self.border_normal)
columnWidth = int(screen.width / float(len(self.stacks)))
xoffset = screen.x + i * columnWidth
winWidth = columnWidth - 2 * self.border_width
if s.split:
columnHeight = int(screen.height / float(len(s)))
winHeight = columnHeight - 2 * self.border_width
yoffset = screen.y + s.index(c) * columnHeight
c.place(
xoffset,
yoffset,
winWidth,
winHeight,
self.border_width,
px
)
c.unhide()
else:
if c == s.cw:
c.place(
xoffset,
screen.y,
winWidth,
screen.height - 2 * self.border_width,
self.border_width,
px
)
c.unhide()
else:
c.hide()
def info(self):
d = Layout.info(self)
d["stacks"] = [i.info() for i in self.stacks]
d["current_stack"] = self.currentStackOffset
return d
def cmd_toggle_split(self):
"""
Toggle vertical split on the current stack.
"""
self.currentStack.toggleSplit()
self.group.layoutAll()
def cmd_down(self):
"""
Switch to the next window in this stack.
"""
self.currentStack.current -= 1
self.group.focus(self.currentStack.cw, False)
def cmd_up(self):
"""
Switch to the previous window in this stack.
"""
self.currentStack.current += 1
self.group.focus(self.currentStack.cw, False)
def cmd_shuffle_up(self):
"""
Shuffle the order of this stack up.
"""
utils.shuffleUp(self.currentStack.lst)
self.currentStack.current += 1
self.group.layoutAll()
def cmd_shuffle_down(self):
"""
Shuffle the order of this stack down.
"""
utils.shuffleDown(self.currentStack.lst)
self.currentStack.current -= 1
self.group.layoutAll()
def cmd_delete(self):
"""
Delete the current stack from the layout.
"""
self.deleteCurrentStack()
def cmd_add(self):
"""
Add another stack to the layout.
"""
self.stacks.append(_WinStack())
self.group.layoutAll()
def cmd_rotate(self):
"""
Rotate order of the stacks.
"""
utils.shuffleUp(self.stacks)
self.group.layoutAll()
def cmd_next(self):
"""
Focus next stack.
"""
return self.nextStack()
def cmd_previous(self):
"""
Focus previous stack.
"""
return self.previousStack()
def cmd_client_to_next(self):
"""
Send the current client to the next stack.
"""
return self.cmd_client_to_stack(self.currentStackOffset + 1)
def cmd_client_to_previous(self):
"""
Send the current client to the previous stack.
"""
return self.cmd_client_to_stack(self.currentStackOffset - 1)
def cmd_client_to_stack(self, n):
"""
Send the current client to stack n, where n is an integer offset.
        If n is too large or less than 0, it is wrapped modulo the number of
stacks.
"""
if not self.currentStack:
return
next = n % len(self.stacks)
win = self.currentStack.cw
self.currentStack.remove(win)
self.stacks[next].add(win)
self.stacks[next].focus(win)
self.group.layoutAll()
def cmd_info(self):
return self.info()
| Fxrh/tispa-wm | libqtile/layout/stack.py | Python | gpl-3.0 | 11,746 |
from __future__ import absolute_import
from .pypi import show
from .pypi import list
| harshasrinivas/pypilist | pypilist/__init__.py | Python | mit | 85 |
import os
import io
import os.path
from recipemd.parser import parseFile
from recipemd import git
from recipemd.serializer.xml import dump
def process(obj_id, target, xslt = None):
""" get file from git, process, write to target folder
Arguments:
obj_id -- git object id of the file (as string)
target -- target folder path (as string, with trailing slash)
        xslt -- xslt file path if an xml-stylesheet PI should be included;
                no PI will be included if None (the default)
"""
stream = io.TextIOWrapper(git.blob_file_handle(obj_id), encoding='utf8')
r = parseFile(stream)
dump(target, r, xslt)
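# e.g. process('3f786850e387', '/var/www/recipes/', xslt='recipes.xsl')
# (the object id, target folder and stylesheet name above are illustrative)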
def xml_filename(name, path):
if not name.endswith('.rmd'):
raise Exception('Invalid file extension for recipe ({})'.format(name))
clean = name[0:-4] + '.xml'
return os.path.join(path, clean)
def clean_output_dir(path):
""" remove xml files and index.html from path """
for (_, _, filenames) in os.walk(path):
for f in filenames:
if f.split('.')[-1] == 'xml':
os.remove(os.path.join(path, f))
try:
os.remove(path + '/index.html')
except FileNotFoundError:
pass
| andreasWallner/recipe-markdown | recipemd/common.py | Python | mit | 1,135 |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
#
# Author : rootntsd
# E-mail : rootntsd@gmail.com
# Date : 14/01/26 0:35:00
# Desc :
#
#from __init__ import BasePlugin
from plugins import BasePlugin
import logging
import inspect
import os
logger = logging.getLogger("plugin")
def callit(obj, mname, args=[]):
mth = eval('obj.%s'%(mname))
if callable(mth):
return apply(mth, args)
else:
return mth
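# e.g. callit(plugin, 'ls', ['-l']) resolves plugin.ls and applies it to '-l',
# while callit(plugin, 'pre_fix') returns the attribute itself since it is not
# callable.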
class ShellPlugin(BasePlugin):
pre_fix="!@#"
def is_match(self, from_uin, content, type):
self.send_content=''
if content.startswith(self.pre_fix):
try:
                content = content[len(self.pre_fix):]  # remove the exact prefix; strip() would eat a character set
# if content.startswith('list'):
# content=content.lstrip('list')
# if content=='':
# self.send_content=dir(self)
# else:
# self.send_content=inspect.getargspec(getattr(self,content.lstrip(' ')))
# pass
# elif content=='show':
# self.send_content='show'
# pass
# else:
# if hasattr(self,content):
                funname = content.split(" ")[0]
                content = content[len(funname):].strip()
#fun=getattr(self,funname)
self.send_content=self.send_content+' '+funname
print 'content:',content
args=[]
if content!='':
args=list(content.split(','))
# if args[0]=='':
# args=[]
print args,len(args)
self.send_content=callit(self,funname,args)
# else:
# #self.send_content=dir('self.'+content)
# pass
pass
except Exception,e:
self.send_content=e
                logger.error(u"Plugin encountered an error {0}".format(e), exc_info = True)
return True
return False
def handle_message(self, callback):
callback(self.send_content)
return True
def test(self):
print 'test'
return 'hello'
def ls(self,args=[]):
return os.popen('ls '+''.join(args)).read()
if __name__=="__main__":
s=ShellPlugin(None,None,None,None)
s.is_match(1,'!@#test','s')
s.is_match(1,'!@#pre_fix','s')
print s.send_content
| evilbinary/robot | plugins/shell.py | Python | mit | 2,486 |
from bokeh.charts import Scatter, output_file, show
x = [1, 2, 3, 4, 5, 6, 7, 8]
y = [2.1, 6.45, 3, 1.4, 4.55, 3.85, 5.2, 0.7]
z = [.5, 1.1, 1.9, 2.5, 3.1, 3.9, 4.85, 5.2]
species = ['cat', 'cat', 'cat', 'dog', 'dog', 'dog', 'mouse', 'mouse']
country = ['US', 'US', 'US', 'US', 'UK', 'UK', 'BR', 'BR']
df = {'time': x, 'weight 1': y, 'weight 2': z, 'species':species, 'country': country}
scatter = Scatter(df, x='time', y='weight 1', color='country', marker='species',
title="Auto MPG", xlabel="Time in days",
ylabel="Weight in grams")
output_file('scatter.html')
show(scatter)
| Serulab/Py4Bio | code/ch14/scatter.py | Python | mit | 618 |
# -*- coding: utf-8 -*-
"""
Hybrid Dile Server
~~~~~~~~
A data tile server
:copyright: (c) 2016 by Raffaele Montella & Sergio Apreda.
:license: Apache 2.0, see LICENSE for more details.
"""
import json, sys, re, urllib, urllib2, socket, unicodedata
import pydoc, cgi, os, time, inspect, collections
from hashlib import md5
from datetime import datetime, timedelta
from pymongo import MongoClient
from geojson import Feature, Point, FeatureCollection
from ast import literal_eval
from utils.misc import pathLeaf
from diles.dilefactory import DileFactory
from utils.querybuildermongo import QueryBuilderMongo
from flask import Flask
from flask import Response
from flask import request
from flask import jsonify
from flask import current_app
from flask import make_response
from flask import session
from flask import url_for
from flask import redirect
from flask import render_template
from flask import abort
from flask import g
from flask import flash
from flask import _app_ctx_stack
from functools import wraps
from functools import update_wrapper
from boto.s3.connection import S3Connection
from boto.s3.connection import Location
from boto.exception import S3ResponseError
AWS_ACCESS_KEY_ID=""
AWS_SECRET_ACCESS_KEY=""
with open('/home/ubuntu/.s3/AWS_ACCESS_KEY_ID', 'r') as myfile:
AWS_ACCESS_KEY_ID=myfile.read().replace('\n', '')
with open('/home/ubuntu/.s3/AWS_SECRET_ACCESS_KEY', 'r') as myfile:
AWS_SECRET_ACCESS_KEY=myfile.read().replace('\n', '')
#### CROSSDOMAIN DECORATOR ####
def crossdomain(origin=None, methods=None, headers=None, max_age=21600, attach_to_all=True, automatic_options=True):
if methods is not None:
methods = ', '.join(sorted(x.upper() for x in methods))
if headers is not None and not isinstance(headers, basestring):
headers = ', '.join(x.upper() for x in headers)
if not isinstance(origin, basestring):
origin = ', '.join(origin)
if isinstance(max_age, timedelta):
max_age = max_age.total_seconds()
def get_methods():
if methods is not None:
return methods
options_resp = current_app.make_default_options_response()
return options_resp.headers['allow']
def decorator(f):
def wrapped_function(*args, **kwargs):
if automatic_options and request.method == 'OPTIONS':
resp = current_app.make_default_options_response()
else:
resp = make_response(f(*args, **kwargs))
if not attach_to_all and request.method != 'OPTIONS':
return resp
h = resp.headers
h['Access-Control-Allow-Origin'] = origin
h['Access-Control-Allow-Methods'] = get_methods()
h['Access-Control-Max-Age'] = str(max_age)
h['Access-Control-Allow-Credentials'] = 'true'
h['Access-Control-Allow-Headers'] = "Origin, X-Requested-With, Content-Type, Accept, Authorization"
if headers is not None:
h['Access-Control-Allow-Headers'] = headers
return resp
f.provide_automatic_options = False
return update_wrapper(wrapped_function, f)
return decorator
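# Typical usage (sketch): stack it under a normal route decorator, e.g.
#
#     @app.route('/api/data', methods=['GET', 'OPTIONS'])
#     @crossdomain(origin='*')
#     def api_data():
#         return jsonify({})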
#### JSONP DECORATOR ####
def jsonp(func):
""" Wrap json as jsonp """
@wraps(func)
def decorated_function(*args, **kwargs):
callback = request.args.get('callback', False)
if callback:
data = str(func(*args, **kwargs).data)
content = str(callback) + '(' + data + ')'
mimetype = 'application/javascript'
return current_app.response_class(content, mimetype=mimetype)
else:
return func(*args, **kwargs)
return decorated_function
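# With @jsonp applied, a request such as /endpoint?callback=cb gets back
# 'cb({...})' served as application/javascript instead of the bare JSON body.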
# create our little application :)
app = Flask(__name__)
# Load default config and override config from an environment variable
app.config.update(dict(
DATABASE = 'test',
COLLECTION_DILES = 'diles',
COLLECTION_FILES = 'files',
DEBUG = True,
SECRET_KEY = 'development key',
USERNAME = 'admin',
PASSWORD = 'default',
LOCATION = 'loc.geometry',
TIME = 'time',
LIMIT = 100
))
app.config.from_envvar('HYBRID_DILE_SERVER_SETTINGS', silent=True)
@app.teardown_appcontext
def close_database(exception):
"""Closes the database again at the end of the request."""
top = _app_ctx_stack.top
if hasattr(top, 'db'):
top.db.close()
def connect_db():
"""Connects to the specific database."""
client = MongoClient()
db = client[app.config['DATABASE']]
return db
def get_db():
"""Opens a new database connection if there is none yet for the
current application context.
"""
if not hasattr(g, 'db'):
g.db = connect_db()
return g.db
def init_db():
"""Initializes the database."""
db = get_db()
def uJsonToDict(param):
# if param isn't None and it's str,unicode type
if param is not None and isinstance(param, (basestring)):
try:
jstring = json.loads(json.dumps(param))
item = literal_eval(jstring)
print item
except:
return None
else:
if item:
return item
else:
return None
else:
return None
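# e.g. uJsonToDict("{'time': ['1980-01-01-00:00:00', '1980-01-02-00:00:00']}")
# yields the corresponding dict, while malformed input yields None.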
""" --------------------- DB QUERY FUNCTIONS --------------------- """
def query_diles_db(query):
db = get_db()
# convention: query[size:2] -- query[0]: query document, query[1]: projection
if query[0]:
# returns the result set with a limit of 100 entities
        # Note: it could be interesting to sort on an automatically generated
        # counter field that tracks the number of times a document was matched
        # question: is a document considered matching if outside of the result limit? (probably not)
return list(db[app.config['COLLECTION_DILES']].find(query[0],query[1]).limit(app.config['LIMIT']))
else:
return "ERROR: malformed query"
def query_files_db(query):
db = get_db()
# convention: query[size:2] -- query[0]: query document, query[1]: projection
if query[0]:
return list(db[app.config['COLLECTION_FILES']].find(query[0],query[1]).limit(app.config['LIMIT']))
else:
return "ERROR: malformed query"
def aggregate_result_diles(pipeline):
db = get_db()
return list(db[app.config['COLLECTION_DILES']].aggregate(pipeline).limit(app.config['LIMIT']))
def aggregate_result_files(pipeline):
db = get_db()
return list(db[app.config['COLLECTION_FILES']].aggregate(pipeline).limit(app.config['LIMIT']))
""" ---------------------------------------------------------------- """
""" ------------- DICT OPERATIONS (FOR DOC BASED DB) --------------- """
def getKeyValue(dictionary,param):
if param in dictionary:
return dictionary[param]
else:
for key in dictionary:
if type(dictionary.get(key)) == type(dict()):
return getKeyValue(dictionary.get(key),param)
return None
def polyToBB(feature):
coords = feature['geometry']['coordinates']
bb = {
"lon_min" : float(coords[0][0]),
"lon_max" : float(coords[2][0]),
"lat_min" : float(coords[0][1]),
"lat_max" : float(coords[2][1])
}
return bb
def getDimentions(param, qbm):
dimensions = uJsonToDict(param)
# being a monodimensional interval per variable, a dict doesn't cause
# collisions, because any overlap can be resolved by the extention of the domain
if dimensions is not None and isinstance(dimensions,dict):
for key in dimensions:
d = dimensions[key]
# convention: d[size:2] -- d[0]: offset start, d[1]: offset end
if key.lower() == app.config['TIME']:
qbm.addField(qbm.queryTimeRange(key,d[0],d[1]))
else:
qbm.addField(qbm.queryRange(key,d[0],d[1]))
return qbm
def getFeature(param, qbm):
feature = uJsonToDict(param)
    # in this case overlap could happen spatially speaking, but it doesn't matter:
    # in mongodb the geointersect handles geojsons as is (supposedly)
if feature is not None and isinstance(feature, dict):
try:
if feature['geometry']['type'] == 'Point':
c = feature['geometry']['coordinates']
qbm.addField(qbm.queryIntersectPoint(app.config['LOCATION'], float(c[0]), float(c[1])))
elif feature['geometry']['type'] == 'Polygon':
bb = polyToBB(feature)
qbm.addField(qbm.queryIntersectBbox(app.config['LOCATION'], bb))
else:
pass
except:
pass
return qbm
def getVariables(var,qbm):
if isinstance(var,(tuple,list)):
queries = [ {"variable": x} for x in var if isinstance(x,basestring)]
if len(queries) > 1:
try:
qbm.addField(qbm.queryLogical('or',queries))
except:
pass
elif len(queries) == 1:
try:
qbm.addField({"variable":var[0]})
except:
pass
else:
pass
elif isinstance(var,basestring):
try:
qbm.addField({"variable": var})
except:
pass
return qbm
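# For example, var=['pr', 'tasmax'] is expected to produce a filter like
# {'$or': [{'variable': 'pr'}, {'variable': 'tasmax'}]} (assuming the obvious
# implementation of queryLogical in QueryBuilderMongo).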
""" ---------------------------------------------------------------- """
@app.cli.command('initdb')
def initdb_command():
"""Creates the database tables."""
init_db()
print('Initialized the database.')
@app.route('/')
def index():
"""Shows the home page
"""
actions=[]
note1 = {
"title" : "Dimensions (use with any of the above)",
"example": "?dim={'time'%3A+['1980-01-01-00%3A00%3A00'%2C+'1980-01-02-00%3A00%3A00']}",
"desc": "add in the url query field, can have multipe keys (like time) for multiple dims"
}
note2 = {
"title" : "Variables (use with any of the above)",
"example": "?var=pr&var=tasmax",
"desc": "add in the url query field, can add multiple variables as for example"
}
my_path=os.path.abspath(inspect.getfile(inspect.currentframe()))
with open(my_path) as f:
add=False
action=None
for line in f:
line=line.lstrip()
if line.startswith("@app."):
line=line.replace("@app.","").replace("'",'"').replace("\n","")
method=None
if line.startswith("route"):
method="get"
path=re.findall(r'"([^"]*)"', line)[0]
if path != '/':
action={"method":method,"url":cgi.escape(path),"params":[]}
elif line.startswith('"""') and action is not None:
if add is False:
add=True
action['title']=line.replace('"""','').strip()
else:
add=False
actions.append(action)
action=None
elif line.startswith("@jsonp"):
action['jsonp']=True
elif line.startswith("@crossdomain"):
action['crossdomain']=True
else:
if add is True:
if ":example:" in line:
line=line.replace(":example: ","").strip()
action['example']=request.host+line
elif line.startswith(":param: "):
line=line.replace(":param: ","").strip()
name=line.split(" -- ")[0]
desc=line.split(" -- ")[1]
action['params'].append({"name":name,"desc":desc})
elif line.startswith(":returns: "):
line=line.replace(":returns: ","").strip()
action['returns']=line
else:
pass
actions.append(note1)
actions.append(note2)
return render_template('layout.html',actions=actions)
"""
-------------------------------------------------------------------------------------------
"""
@app.route('/discovery/dile/by/feature')
def discovery_dile_by_feature():
"""Discovery the diles given a Feature (Point or Polygon)
:param: feat -- json feature
:param: dim -- json document
:param: var -- single or multiple string variables' names
:example: /discovery/dile/by/feature?feat={'geometry'%3A+{'type'%3A+'Point'%2C+'coordinates'%3A+[-90%2C+42.293564192170095]}%2C+'type'%3A+'Feature'%2C+'properties'%3A+{}}
    :returns: geojson -- returns a feature collection with the selected diles.
-------------------------------------------------------------------------------------------
"""
# creating the object to build the queries
qbm = QueryBuilderMongo()
# request the arguments of the url query
f_param = request.args.get('feat')
d_param = request.args.get('dim')
v_param = request.args.getlist('var')
# creating the feature query
if f_param is not None:
qbm = getFeature(f_param, qbm)
else:
return "ERROR: -feat- not found"
# creating the dimension query
if d_param is not None:
qbm = getDimentions(d_param, qbm)
# creating the variables query
if v_param:
qbm = getVariables(v_param, qbm)
# adding the projection
qbm.addProjection({"_id": 0, "uri" : 1})
return jsonify(query_diles_db(qbm.getQuery()))
@app.route('/discovery/dile/by/position/<lon>/<lat>')
@jsonp
def discovery_dile_by_position(lon,lat):
"""Discovery the diles given a lon/lat position.
:example: /discovery/dile/by/position/-135.0/22.5
:param: dim -- json document
:param: var -- single or multiple string variables' names
    :returns: geojson -- returns a feature collection with the selected diles.
-------------------------------------------------------------------------------------------
"""
qbm = QueryBuilderMongo()
d_param = request.args.get('dim')
v_param = request.args.getlist('var')
# creating the dimension query
if d_param is not None:
qbm = getDimentions(d_param, qbm)
# creating the variables query
if v_param:
qbm = getVariables(v_param, qbm)
query = qbm.queryIntersectPoint(app.config['LOCATION'], float(lon), float(lat))
qbm.addField(query)
qbm.addProjection({"_id": 0, "uri" : 1})
return jsonify(query_diles_db(qbm.getQuery()))
@app.route('/discovery/dile/by/radius/<lon>/<lat>/<radius>')
@jsonp
def discovery_dile_by_radius(lon,lat,radius):
"""Discovery the diles given a center point by lon/lat and a radius in km.
:example: /discovery/dile/by/radius/-135.0/22.5/5000.0
:param: dim -- json document
:param: var -- single or multiple string variables' names
    :returns: geojson -- returns a feature collection with the selected diles.
-------------------------------------------------------------------------------------------
"""
qbm = QueryBuilderMongo()
d_param = request.args.get('dim')
v_param = request.args.getlist('var')
# creating the dimension query
if d_param is not None:
qbm = getDimentions(d_param, qbm)
# creating the variables query
if v_param:
qbm = getVariables(v_param, qbm)
query = qbm.queryIntersectRadius(app.config['LOCATION'], float(lon), float(lat), float(radius))
qbm.addField(query)
qbm.addProjection({"_id": 0, "uri" : 1})
return jsonify(query_diles_db(qbm.getQuery()))
@app.route('/discovery/dile/by/bbox/<minLon>/<minLat>/<maxLon>/<maxLat>')
@jsonp
def discovery_dile_by_bbox(minLon,minLat,maxLon,maxLat):
"""Discovery the diles given a bounding box.
:example: /discovery/dile/by/bbox/-135.0/22.5/-45.0/67.5
:param: dim -- json document
:param: var -- single or multiple string variables' names
    :returns: geojson -- returns a feature collection with the selected diles.
-------------------------------------------------------------------------------------------
"""
bb = {
"lat_min": float(minLat),
"lat_max": float(maxLat),
"lon_min": float(minLon),
"lon_max": float(maxLon)
}
qbm = QueryBuilderMongo()
d_param = request.args.get('dim')
v_param = request.args.getlist('var')
# creating the dimension query
if d_param is not None:
qbm = getDimentions(d_param, qbm)
# creating the variables query
if v_param:
qbm = getVariables(v_param, qbm)
query = qbm.queryIntersectBbox(app.config['LOCATION'],bb)
qbm.addField(query)
qbm.addProjection({"_id": 0, "uri" : 1})
return jsonify(query_diles_db(qbm.getQuery()))
@app.route('/select/dile')
def select_dile_by_uri():
"""Download a dile given a uri.
:example: /select/dile?uri=http://s3.amazonaws.com/edu-uchicago-rdcep-diles/fd65252e41e3cf0b431a07ad6e2cbe85/sdile_pr_2_1_1/pr/0/2/1/1/dile_0_2_1_1.nc
:param: uri -- a valid uri to access the dile
    :returns: netcdf4 -- the requested dile.
-------------------------------------------------------------------------------------------
"""
uri=request.args.get('uri')
if uri is not None:
if uri.startswith("http://s3.amazonaws.com/"):
path = uri.replace("http://s3.amazonaws.com/","")
bname, kstr = path.split("/",1) # split the bname from the key string
conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
try:
bucket = conn.get_bucket(bname)
except:
print "BUCKET NOT FOUND"
return str("ERROR: bucket "+bname+" not found")
else:
print "BUCKET CONNECTED"
try:
key = bucket.get_key(kstr)
print "KEY: ", key
except:
print "KEY NOT FOUND"
return str("ERROR: key "+kstr+"not found")
else:
try:
key.open_read() # opens the file
headers = dict(key.resp.getheaders()) # request the headers
headers["Content-Disposition"] = "inline; filename="+str(pathLeaf(key.name))
return Response(key, headers=headers) # return a response
except S3ResponseError as e:
return Response(e.body, status=e.status, headers=key.resp.getheaders())
abort(400) | RDCEP/hybrid-dile-server | lib/hybrid_dile_server.py | Python | apache-2.0 | 18,985 |
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
import importlib
import os
from .fairseq_task import FairseqTask
TASK_REGISTRY = {}
TASK_CLASS_NAMES = set()
def setup_task(args):
return TASK_REGISTRY[args.task].setup_task(args)
def register_task(name):
"""Decorator to register a new task."""
def register_task_cls(cls):
if name in TASK_REGISTRY:
raise ValueError('Cannot register duplicate task ({})'.format(name))
if not issubclass(cls, FairseqTask):
raise ValueError('Task ({}: {}) must extend FairseqTask'.format(name, cls.__name__))
if cls.__name__ in TASK_CLASS_NAMES:
raise ValueError('Cannot register task with duplicate class name ({})'.format(cls.__name__))
TASK_REGISTRY[name] = cls
TASK_CLASS_NAMES.add(cls.__name__)
return cls
return register_task_cls
# automatically import any Python files in the tasks/ directory
for file in os.listdir(os.path.dirname(__file__)):
if file.endswith('.py') and not file.startswith('_'):
module = file[:file.find('.py')]
importlib.import_module('fairseq.tasks.' + module)
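# --- Hedged usage sketch (not part of the original file) ---
# How the decorator above is used. 'example_task' and _ExampleTask are
# illustrative names, not real fairseq tasks; real tasks live in their own
# files in this directory, where the import loop above picks them up.
@register_task('example_task')
class _ExampleTask(FairseqTask):
    @classmethod
    def setup_task(cls, args):
        # must extend FairseqTask, or register_task() raises ValueError
        return cls(args)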
| mlperf/training_results_v0.5 | v0.5.0/nvidia/submission/code/translation/pytorch/fairseq/tasks/__init__.py | Python | apache-2.0 | 1,387 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2014, Jens Depuydt <http://www.jensd.be>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: postgresql_lang
short_description: Adds, removes or changes procedural languages with a PostgreSQL database.
description:
- Adds, removes or changes procedural languages with a PostgreSQL database.
- This module allows you to add a language, remove a language or change the trust
relationship with a PostgreSQL database. The module can be used on the machine
where executed or on a remote host.
- When removing a language from a database, it is possible that dependencies prevent
the language from being removed. In that case, you can specify cascade to
automatically drop objects that depend on the language (such as functions in the
language). In case the language can't be deleted because it is required by the
database system, you can specify fail_on_drop=no to ignore the error.
- Be careful when marking a language as trusted since this could be a potential
security breach. Untrusted languages allow only users with the PostgreSQL superuser
privilege to use this language to create new functions.
version_added: "1.7"
options:
lang:
description:
- name of the procedural language to add, remove or change
required: true
default: null
trust:
description:
- make this language trusted for the selected db
required: false
default: no
choices: [ "yes", "no" ]
db:
description:
- name of database where the language will be added, removed or changed
required: false
default: null
force_trust:
description:
- marks the language as trusted, even if it's marked as untrusted in pg_pltemplate.
- use with care!
required: false
default: no
choices: [ "yes", "no" ]
fail_on_drop:
description:
- if C(yes), fail when removing a language. Otherwise just log and continue
- in some cases, it is not possible to remove a language (used by the db-system). When dependencies block the removal, consider using C(cascade).
required: false
default: 'yes'
choices: [ "yes", "no" ]
cascade:
description:
- when dropping a language, also delete object that depend on this language.
- only used when C(state=absent).
required: false
default: no
choices: [ "yes", "no" ]
port:
description:
- Database port to connect to.
required: false
default: 5432
login_user:
description:
- User used to authenticate with PostgreSQL
required: false
default: postgres
login_password:
description:
- Password used to authenticate with PostgreSQL (must match C(login_user))
required: false
default: null
login_host:
description:
- Host running PostgreSQL where you want to execute the actions.
required: false
default: localhost
state:
description:
- The state of the language for the selected database
required: false
default: present
choices: [ "present", "absent" ]
notes:
- The default authentication assumes that you are either logging in as or
sudo'ing to the postgres account on the host.
- This module uses psycopg2, a Python PostgreSQL database adapter. You must
ensure that psycopg2 is installed on the host before using this module. If
the remote host is the PostgreSQL server (which is the default case), then
PostgreSQL must also be installed on the remote host. For Ubuntu-based
systems, install the postgresql, libpq-dev, and python-psycopg2 packages
on the remote host before using this module.
requirements: [ psycopg2 ]
author: "Jens Depuydt (@jensdepuydt)"
'''
EXAMPLES = '''
# Add language pltclu to database testdb if it doesn't exist:
- postgresql_lang db=testdb lang=pltclu state=present
# Add language pltclu to database testdb if it doesn't exist and mark it as trusted:
# Marks the language as trusted if it exists but isn't trusted yet
# force_trust makes sure that the language will be marked as trusted
- postgresql_lang:
db: testdb
lang: pltclu
state: present
trust: yes
force_trust: yes
# Remove language pltclu from database testdb:
- postgresql_lang:
db: testdb
lang: pltclu
state: absent
# Remove language pltclu from database testdb and remove all dependencies:
- postgresql_lang:
db: testdb
lang: pltclu
state: absent
cascade: yes
# Remove language c from database testdb but ignore errors if something prevents the removal:
- postgresql_lang:
db: testdb
lang: pltclu
state: absent
fail_on_drop: no
'''
import traceback
try:
import psycopg2
except ImportError:
postgresqldb_found = False
else:
postgresqldb_found = True
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
def lang_exists(cursor, lang):
"""Checks if language exists for db"""
query = "SELECT lanname FROM pg_language WHERE lanname='%s'" % lang
cursor.execute(query)
return cursor.rowcount > 0
def lang_istrusted(cursor, lang):
"""Checks if language is trusted for db"""
query = "SELECT lanpltrusted FROM pg_language WHERE lanname='%s'" % lang
cursor.execute(query)
return cursor.fetchone()[0]
def lang_altertrust(cursor, lang, trust):
"""Changes if language is trusted for db"""
query = "UPDATE pg_language SET lanpltrusted = %s WHERE lanname=%s"
cursor.execute(query, (trust, lang))
return True
def lang_add(cursor, lang, trust):
"""Adds language for db"""
if trust:
query = 'CREATE TRUSTED LANGUAGE "%s"' % lang
else:
query = 'CREATE LANGUAGE "%s"' % lang
cursor.execute(query)
return True
def lang_drop(cursor, lang, cascade):
"""Drops language for db"""
cursor.execute("SAVEPOINT ansible_pgsql_lang_drop")
try:
if cascade:
cursor.execute("DROP LANGUAGE \"%s\" CASCADE" % lang)
else:
cursor.execute("DROP LANGUAGE \"%s\"" % lang)
except Exception:
cursor.execute("ROLLBACK TO SAVEPOINT ansible_pgsql_lang_drop")
cursor.execute("RELEASE SAVEPOINT ansible_pgsql_lang_drop")
return False
cursor.execute("RELEASE SAVEPOINT ansible_pgsql_lang_drop")
return True
def main():
module = AnsibleModule(
argument_spec=dict(
login_user=dict(default="postgres"),
login_password=dict(default="", no_log=True),
login_host=dict(default=""),
db=dict(required=True),
port=dict(default='5432'),
lang=dict(required=True),
state=dict(default="present", choices=["absent", "present"]),
trust=dict(type='bool', default='no'),
force_trust=dict(type='bool', default='no'),
cascade=dict(type='bool', default='no'),
fail_on_drop=dict(type='bool', default='yes'),
),
supports_check_mode=True
)
db = module.params["db"]
lang = module.params["lang"]
state = module.params["state"]
trust = module.params["trust"]
force_trust = module.params["force_trust"]
cascade = module.params["cascade"]
fail_on_drop = module.params["fail_on_drop"]
if not postgresqldb_found:
module.fail_json(msg="the python psycopg2 module is required")
params_map = {
"login_host": "host",
"login_user": "user",
"login_password": "password",
"port": "port",
"db": "database"
}
kw = dict((params_map[k], v) for (k, v) in module.params.items()
if k in params_map and v != "")
try:
db_connection = psycopg2.connect(**kw)
cursor = db_connection.cursor()
except Exception as e:
module.fail_json(msg="unable to connect to database: %s" % to_native(e), exception=traceback.format_exc())
changed = False
kw = {'db': db, 'lang': lang, 'trust': trust}
if state == "present":
if lang_exists(cursor, lang):
lang_trusted = lang_istrusted(cursor, lang)
if (lang_trusted and not trust) or (not lang_trusted and trust):
if module.check_mode:
changed = True
else:
changed = lang_altertrust(cursor, lang, trust)
else:
if module.check_mode:
changed = True
else:
changed = lang_add(cursor, lang, trust)
if force_trust:
changed = lang_altertrust(cursor, lang, trust)
else:
if lang_exists(cursor, lang):
if module.check_mode:
changed = True
kw['lang_dropped'] = True
else:
changed = lang_drop(cursor, lang, cascade)
if fail_on_drop and not changed:
msg = "unable to drop language, use cascade to delete dependencies or fail_on_drop=no to ignore"
module.fail_json(msg=msg)
kw['lang_dropped'] = changed
if changed:
if module.check_mode:
db_connection.rollback()
else:
db_connection.commit()
kw['changed'] = changed
module.exit_json(**kw)
if __name__ == '__main__':
main()
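# --- Hedged sketch (not part of the original module) ---
# The SAVEPOINT pattern used by lang_drop() above, shown in isolation: a failed
# DROP is rolled back without aborting the surrounding transaction. Connection
# parameters are assumptions; kept as comments since this file runs as an
# Ansible module.
#
# import psycopg2
# conn = psycopg2.connect(database="testdb", user="postgres")
# cur = conn.cursor()
# cur.execute("SAVEPOINT example_sp")
# try:
#     cur.execute('DROP LANGUAGE "plpgsql"')  # fails if objects depend on it
# except psycopg2.Error:
#     cur.execute("ROLLBACK TO SAVEPOINT example_sp")  # undo only the failed DROP
# cur.execute("RELEASE SAVEPOINT example_sp")
# conn.commit()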
| ravibhure/ansible | lib/ansible/modules/database/postgresql/postgresql_lang.py | Python | gpl-3.0 | 9,607 |
import threading
import re
import os
import datetime
import time
import sublime
from .console_write import console_write
from .package_installer import PackageInstaller
from .package_renamer import PackageRenamer
from .open_compat import open_compat, read_compat
class AutomaticUpgrader(threading.Thread):
"""
Automatically checks for updated packages and installs them. Controlled
by the `auto_upgrade`, `auto_upgrade_ignore`, and `auto_upgrade_frequency`
settings.
"""
def __init__(self, found_packages):
"""
:param found_packages:
A list of package names for the packages that were found to be
installed on the machine.
"""
self.installer = PackageInstaller()
self.manager = self.installer.manager
self.load_settings()
self.package_renamer = PackageRenamer()
self.package_renamer.load_settings()
self.auto_upgrade = self.settings.get('auto_upgrade')
self.auto_upgrade_ignore = self.settings.get('auto_upgrade_ignore')
self.load_last_run()
self.determine_next_run()
# Detect if a package is missing that should be installed
self.missing_packages = list(set(self.installed_packages) -
set(found_packages))
if self.auto_upgrade and self.next_run <= time.time():
self.save_last_run(time.time())
threading.Thread.__init__(self)
def load_last_run(self):
"""
Loads the last run time from disk into memory
"""
self.last_run = None
self.last_run_file = os.path.join(sublime.packages_path(), 'User',
'Package Control.last-run')
if os.path.isfile(self.last_run_file):
with open_compat(self.last_run_file) as fobj:
try:
self.last_run = int(read_compat(fobj))
except ValueError:
pass
def determine_next_run(self):
"""
Figure out when the next run should happen
"""
self.next_run = int(time.time())
frequency = self.settings.get('auto_upgrade_frequency')
if frequency:
if self.last_run:
self.next_run = int(self.last_run) + (frequency * 60 * 60)
else:
self.next_run = time.time()
def save_last_run(self, last_run):
"""
Saves a record of when the last run was
:param last_run:
The unix timestamp of when to record the last run as
"""
with open_compat(self.last_run_file, 'w') as fobj:
fobj.write(str(int(last_run)))
def load_settings(self):
"""
Loads the list of installed packages from the
Package Control.sublime-settings file
"""
self.settings_file = 'Package Control.sublime-settings'
self.settings = sublime.load_settings(self.settings_file)
self.installed_packages = self.settings.get('installed_packages', [])
self.should_install_missing = self.settings.get('install_missing')
if not isinstance(self.installed_packages, list):
self.installed_packages = []
def run(self):
self.install_missing()
if self.next_run > time.time():
self.print_skip()
return
self.upgrade_packages()
def install_missing(self):
"""
Installs all packages that were listed in the list of
`installed_packages` from Package Control.sublime-settings but were not
found on the filesystem and passed as `found_packages`.
"""
if not self.missing_packages or not self.should_install_missing:
return
console_write(u'Installing %s missing packages' % len(self.missing_packages), True)
for package in self.missing_packages:
if self.installer.manager.install_package(package):
console_write(u'Installed missing package %s' % package, True)
def print_skip(self):
"""
Prints a notice in the console if the automatic upgrade is skipped
due to already having been run in the last `auto_upgrade_frequency`
hours.
"""
last_run = datetime.datetime.fromtimestamp(self.last_run)
next_run = datetime.datetime.fromtimestamp(self.next_run)
date_format = '%Y-%m-%d %H:%M:%S'
message_string = u'Skipping automatic upgrade, last run at %s, next run at %s or after' % (
last_run.strftime(date_format), next_run.strftime(date_format))
console_write(message_string, True)
def upgrade_packages(self):
"""
Upgrades all packages that are not currently upgraded to the latest
version. Also renames any installed packages to their new names.
"""
if not self.auto_upgrade:
return
self.package_renamer.rename_packages(self.installer)
package_list = self.installer.make_package_list(['install',
'reinstall', 'downgrade', 'overwrite', 'none'],
ignore_packages=self.auto_upgrade_ignore)
# If Package Control is being upgraded, just do that and restart
for package in package_list:
if package[0] != 'Package Control':
continue
def reset_last_run():
# Re-save the last run time so it runs again after PC has
# been updated
self.save_last_run(self.last_run)
sublime.set_timeout(reset_last_run, 1)
package_list = [package]
break
if not package_list:
console_write(u'No updated packages', True)
return
console_write(u'Installing %s upgrades' % len(package_list), True)
disabled_packages = []
def do_upgrades():
# Wait so that the ignored packages can be "unloaded"
time.sleep(0.5)
# We use a function to generate the on-complete lambda because if
# we don't, the lambda will bind to info at the current scope, and
# thus use the last value of info from the loop
def make_on_complete(name):
return lambda: self.installer.reenable_package(name)
for info in package_list:
if info[0] in disabled_packages:
on_complete = make_on_complete(info[0])
else:
on_complete = None
self.installer.manager.install_package(info[0])
version = re.sub(r'^.*?(v[\d\.]+).*?$', r'\1', info[2])
if version == info[2] and version.find('pull with') != -1:
vcs = re.sub(r'^pull with (\w+).*?$', r'\1', version)
version = 'latest %s commit' % vcs
message_string = u'Upgraded %s to %s' % (info[0], version)
console_write(message_string, True)
if on_complete:
sublime.set_timeout(on_complete, 1)
# Disabling a package means changing settings, which can only be done
# in the main thread. We then create a new background thread so that
# the upgrade process does not block the UI.
def disable_packages():
disabled_packages.extend(self.installer.disable_packages([info[0] for info in package_list]))
threading.Thread(target=do_upgrades).start()
sublime.set_timeout(disable_packages, 1)
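# --- Hedged sketch (not part of the original module) ---
# Minimal demonstration of the late-binding pitfall that make_on_complete()
# above guards against: a plain lambda closes over the variable, not its value.
if __name__ == '__main__':
    late = [lambda: name for name in ['a', 'b', 'c']]
    print([f() for f in late])   # ['c', 'c', 'c'] - every lambda sees the last value

    def make(name):
        return lambda: name      # freezes the current value in a new scope

    early = [make(name) for name in ['a', 'b', 'c']]
    print([f() for f in early])  # ['a', 'b', 'c']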
| dostavro/dotfiles | sublime2/Packages/Package Control/package_control/automatic_upgrader.py | Python | mit | 7,477 |
#!/usr/bin/env python
'''
File name: main_make_SWS_scalar_product.py
Author: Guillaume Viejo
Date created: 16/08/2017
Python Version: 3.5.2
'''
import sys
import numpy as np
import pandas as pd
import scipy.io
from functions import *
# from pylab import *
from multiprocessing import Pool
import os
import neuroseries as nts
from time import time
from pylab import *
from functions import quickBin
from numba import jit
import _pickle as cPickle
@jit(nopython=True)
def scalarProduct(r):
tmp = np.sqrt(np.power(r, 2).sum(1))
denom = tmp[0:-1] * tmp[1:]
num = np.sum(r[0:-1]*r[1:], 1)
return num/(denom)
@jit(nopython=True)
def quickBin(spikelist, ts, bins, index):
rates = np.zeros((len(ts), len(bins)-1, len(index)))
for i, t in enumerate(ts):
tbins = t + bins
for j in range(len(spikelist)):
a, _ = np.histogram(spikelist[j], tbins)
rates[i,:,j] = a
return rates
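# --- Hedged sketch (not part of the original script) ---
# quickBin() above counts the spikes of each unit inside windows positioned
# around every event timestamp. Toy example (commented out to leave the
# script's behaviour unchanged):
#
# rates = quickBin([np.array([1.0, 5.0, 9.0])], np.array([5.0]),
#                  np.arange(-5.0, 6.0, 2.0), np.array([0]))
# rates.shape -> (1, 5, 1): one event, five 2-unit bins, one unit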
data_directory = '/mnt/DataGuillaume/MergedData/'
datasets = np.loadtxt(data_directory+'datasets_ThalHpc.list', delimiter = '\n', dtype = str, comments = '#')
anglehd = {}
anglenohd = {}
zanglehd = {}
zanglenohd = {}
for session in datasets:
print(session)
generalinfo = scipy.io.loadmat(data_directory+session+'/Analysis/GeneralInfo.mat')
shankStructure = loadShankStructure(generalinfo)
if len(generalinfo['channelStructure'][0][0][1][0]) == 2:
hpc_channel = generalinfo['channelStructure'][0][0][1][0][1][0][0] - 1
else:
hpc_channel = generalinfo['channelStructure'][0][0][1][0][0][0][0] - 1
spikes,shank = loadSpikeData(data_directory+session+'/Analysis/SpikeData.mat', shankStructure['thalamus'])
wake_ep = loadEpoch(data_directory+session, 'wake')
sleep_ep = loadEpoch(data_directory+session, 'sleep')
sws_ep = loadEpoch(data_directory+session, 'sws')
rem_ep = loadEpoch(data_directory+session, 'rem')
sleep_ep = sleep_ep.merge_close_intervals(threshold=1.e3)
sws_ep = sleep_ep.intersect(sws_ep)
rem_ep = sleep_ep.intersect(rem_ep)
rip_ep,rip_tsd = loadRipples(data_directory+session)
rip_ep = sws_ep.intersect(rip_ep)
rip_tsd = rip_tsd.restrict(sws_ep)
hd_info = scipy.io.loadmat(data_directory+session+'/Analysis/HDCells.mat')['hdCellStats'][:,-1]
hd_info_neuron = np.array([hd_info[n] for n in spikes.keys()])
####################################################################################################################
# binning data
####################################################################################################################
spikeshd = {k:spikes[k] for k in np.where(hd_info_neuron==1)[0] if k not in []}
spikesnohd = {k:spikes[k] for k in np.where(hd_info_neuron==0)[0] if k not in []}
hdneurons = np.sort(list(spikeshd.keys()))
nohdneurons = np.sort(list(spikesnohd.keys()))
bin_size = 40
n_ex = 2000
rnd_tsd = nts.Ts(t = np.sort(np.hstack([np.random.randint(sws_ep.loc[i,'start']+500000, sws_ep.loc[i,'end']+500000, np.maximum(1,n_ex//len(sws_ep))) for i in sws_ep.index])))
####################################################################################################################
# MEAN AND STD SWS
####################################################################################################################
# # mean and standard deviation during SWS
# mean_sws = pd.DataFrame(index = np.sort(list(spikes.keys())), columns = ['min', 'max'])
# for n in spikes.keys():
# r = []
# for e in sws_ep.index:
# bins = np.arange(sws_ep.loc[e,'start'], sws_ep.loc[e,'end'], bin_size*1e3)
# a, _ = np.histogram(spikes[n].restrict(sws_ep.loc[[e]]).index.values, bins)
# r.append(a)
# r = np.hstack(r)
# r = r / (bin_size*1e-3)
# mean_sws.loc[n,'min']= r.min()
# mean_sws.loc[n,'max']= r.max()
bins = np.arange(0, 2000+2*bin_size, bin_size) - 1000 - bin_size/2
times = bins[0:-1] + np.diff(bins)/2
####################################################################################################################
# HD NEURONS
####################################################################################################################
if len(spikeshd) >=5:
ts = rip_tsd.as_units('ms').index.values
rates = quickBin([spikeshd[j].as_units('ms').index.values for j in hdneurons], ts, bins, hdneurons)
# # rates = rates /float(bin_size*1e-3)
# angle = pd.DataFrame(index = times[0:-1], columns = np.arange(len(rip_tsd)))
# for i, r in enumerate(rates):
# tmp = scalarProduct(r)
# angle[i] = tmp
# random
ts = rnd_tsd.as_units('ms').index.values
rates2 = quickBin([spikeshd[j].as_units('ms').index.values for j in hdneurons], ts, bins, hdneurons)
# # rates2 = rates2/float(bin_size*1e-3)
# shuffled = pd.DataFrame(index = times[0:-1], columns = np.arange(len(rnd_tsd)))
# for i, r in enumerate(rates2):
# tmp = scalarProduct(r)
# shuffled[i] = tmp
# anglehd[session] = (angle.mean(1) - shuffled.mean(1))/shuffled.mean(1)
# normalized
zangle = pd.DataFrame(index = times[0:-1], columns = np.arange(len(rip_tsd)))
min_ = rates.min(0).min(0)
max_ = rates.max(0).max(0)
zrates = (rates - min_) / (max_ - min_)
for i, r in enumerate(zrates):
tmp = scalarProduct(r)
zangle[i] = tmp
# random
zshuffled = pd.DataFrame(index = times[0:-1], columns = np.arange(len(rnd_tsd)))
min_ = rates2.min(0).min(0)
max_ = rates2.max(0).max(0)
zrates2 = (rates2 - min_) / (max_ - min_)
for i, r in enumerate(zrates2):
tmp = scalarProduct(r)
zshuffled[i] = tmp
zanglehd[session] = (zangle.mean(1) - zshuffled.mean(1))/zshuffled.mean(1)
anglehd[session] = zangle #.fillna(0)
####################################################################################################################
# NO HD NEURONS
####################################################################################################################
if len(spikesnohd) >=5:
ts = rip_tsd.as_units('ms').index.values
rates = quickBin([spikesnohd[j].as_units('ms').index.values for j in nohdneurons], ts, bins, nohdneurons)
# # rates = rates/float(bin_size*1e-3)
# angle = pd.DataFrame(index = times[0:-1], columns = np.arange(len(rip_tsd)))
# for i, r in enumerate(rates):
# angle[i] = scalarProduct(r)
# random
ts = rnd_tsd.as_units('ms').index.values
rates2 = quickBin([spikesnohd[j].as_units('ms').index.values for j in nohdneurons], ts, bins, nohdneurons)
# # rates2 = rates2/float(bin_size*1e-3)
# shuffled = pd.DataFrame(index = times[0:-1], columns = np.arange(len(rnd_tsd)))
# for i, r in enumerate(rates2):
# shuffled[i] = scalarProduct(r)
# anglenohd[session] = (angle.mean(1) - shuffled.mean(1))/shuffled.mean(1)
# normalized
zangle = pd.DataFrame(index = times[0:-1], columns = np.arange(len(rip_tsd)))
min_ = rates.min(0).min(0)
max_ = rates.max(0).max(0)
zrates = (rates - min_) / (max_ - min_)
for i, r in enumerate(zrates):
zangle[i] = scalarProduct(r)
# random
zshuffled = pd.DataFrame(index = times[0:-1], columns = np.arange(len(rnd_tsd)))
# zrates2 = (rates2 - m) / (s+1)
min_ = rates2.min(0).min(0)
max_ = rates2.max(0).max(0)
zrates2 = (rates2 - min_) / (max_ - min_)
for i, r in enumerate(zrates2):
zshuffled[i] = scalarProduct(r)
zanglenohd[session] = (zangle.mean(1) - zshuffled.mean(1))/zshuffled.mean(1)
anglenohd[session] = zangle #.fillna(0)
# anglehd = pd.DataFrame.from_dict(anglehd)
# anglenohd = pd.DataFrame.from_dict(anglenohd)
# anglehd = anglehd.rolling(window=10,win_type='gaussian',center=True,min_periods=1, axis = 0).mean(std=1)
# anglenohd = anglenohd.rolling(window=10,win_type='gaussian',center=True,min_periods=1, axis = 0).mean(std=1)
zanglehd = pd.DataFrame.from_dict(zanglehd)
zanglenohd = pd.DataFrame.from_dict(zanglenohd)
zanglehd = zanglehd.rolling(window=10,win_type='gaussian',center=True,min_periods=1, axis = 0).mean(std=1)
zanglenohd = zanglenohd.rolling(window=10,win_type='gaussian',center=True,min_periods=1, axis = 0).mean(std=1)
# subplot(211)
# plot(anglehd.mean(1), label = 'hd')
# plot(anglenohd.mean(1), label = 'no hd')
# legend()
# title("Scalar product")
# subplot(212)
figure()
plot(zanglehd.mean(1))
plot(zanglenohd.mean(1))
legend()
title("Scalar product + norm [0 1]")
# comparing with isomap radius
path = '../figures/figures_articles_v4/figure1/'
files = [f for f in os.listdir(path) if '.pickle' in f and 'Mouse' in f]
files.remove("Mouse17-130129.pickle")
radius = []
velocity = []
stability = []
order = []
for f in files:
data = cPickle.load(open(path+f, 'rb'))
swrvel = []
swrrad = []
for n in data['swr'].keys():
iswr = data['swr'][n]['iswr']
rip_tsd = data['swr'][n]['rip_tsd']
times = data['swr'][n]['times']
normswr = np.sqrt(np.sum(np.power(iswr, 2), -1))
normswr = pd.DataFrame(index = times, columns = rip_tsd.index.values.astype('int'), data = normswr.T)
swrrad.append(normswr)
angswr = np.arctan2(iswr[:,:,1], iswr[:,:,0])
angswr = (angswr + 2*np.pi)%(2*np.pi)
tmp = []
for i in range(len(angswr)):
a = np.unwrap(angswr[i])
b = pd.Series(index = times, data = a)
c = b.rolling(window = 10, win_type='gaussian', center=True, min_periods=1).mean(std=1.0)
tmp.append(np.abs(np.diff(c.values))/0.1)
tmp = pd.DataFrame(index = times[0:-1] + np.diff(times)/2, columns = rip_tsd.index.values.astype('int'), data = np.array(tmp).T)
swrvel.append(tmp)
swrvel = pd.concat(swrvel, 1)
swrrad = pd.concat(swrrad, 1)
swrvel = swrvel.sort_index(1)
swrrad = swrrad.sort_index(1)
s = f.split('-')[0]+'/'+ f.split('.')[0]
stab = anglehd[s]
# cutting between -500 to 500
stab = stab.loc[-500:500]
# aligning swrrad.index to stab.index
newswrrad = []
for i in swrrad.columns:
y = swrrad[i].values
if len(y.shape) ==2 :
print("Bug in ", f)
y = y[:,0]
fi = scipy.interpolate.interp1d(swrrad.index.values, y)
newswrrad.append(fi(stab.index.values))
newswrrad = pd.DataFrame(index = stab.index.values, columns = swrrad.columns, data = np.array(newswrrad).T)
newswrvel = []
for i in swrvel.columns:
y = swrvel[i].values
if len(y.shape) ==2 :
y = y[:,0]
fi = scipy.interpolate.interp1d(swrvel.index.values, y)
newswrvel.append(fi(stab.index.values))
newswrvel = pd.DataFrame(index = stab.index.values, columns = swrvel.columns, data = np.array(newswrvel).T)
radius.append(newswrrad.mean(1))
stability.append(stab.mean(1))
velocity.append(newswrvel.mean(1))
order.append(f)
radius = pd.concat(radius, 1)
stability = pd.concat(stability, 1)
velocity = pd.concat(velocity, 1)
stability = stability.apply(scipy.stats.zscore)
radius = radius.apply(scipy.stats.zscore)
velocity = velocity.apply(scipy.stats.zscore)
figure()
subplot(231)
for i in radius.columns:
plot(radius[i])
title("Radius")
subplot(232)
for i in velocity.columns:
plot(velocity[i])
title("Ang velocity")
subplot(233)
for i in stability.columns:
plot(stability[i])
title("Stability")
subplot(234)
for i in radius.columns:
scatter(radius[i], stability[i])
xlabel("Radius")
ylabel("Stability")
subplot(235)
for i in radius.columns:
scatter(velocity[i], stability[i])
xlabel("velocity")
ylabel("Stability")
tosave = {'velocity':velocity,
'radius':radius}
show()
sys.exit()
store = pd.HDFStore('../figures/figures_articles_v4/figure2/test.h5')
if normed:
store.append('anglehd_normed', anglehd)
store.append('anglenohd_normed', anglenohd)
else:
store.append('anglehd', anglehd)
store.append('anglenohd', anglenohd)
store.close()
figure()
store = pd.HDFStore('../figures/figures_articles_v4/figure2/test.h5')
subplot(2,2,1)
plot(store['anglehd'].mean(1), label = 'HD')
plot(store['anglenohd'].mean(1), label = 'non-HD')
legend()
title("Scalar Product")
subplot(2,2,2)
plot(store['pearsonhd'].mean(1), label = 'HD')
plot(store['pearsonnohd'].mean(1), label = 'non-HD')
legend()
title("Pearson Correlation")
subplot(2,2,3)
plot(store['anglehd_normed'].mean(1), label = 'HD')
plot(store['anglenohd_normed'].mean(1), label = 'non-HD')
legend()
title("Scalar Product normalized")
subplot(2,2,4)
plot(store['pearsonhd_normed'].mean(1), label = 'HD')
plot(store['pearsonnohd_normed'].mean(1), label = 'non-HD')
legend()
title("Pearson Correlation normalized")
show()
sys.exit()
anglehd = pd.DataFrame.from_dict(anglehd)
anglenohd = pd.DataFrame.from_dict(anglenohd)
plot(anglehd.mean(1), label = 'hd')
plot(anglenohd.mean(1), label = 'nohd')
legend()
show()
sys.exit()
datatosave = cPickle.load(open("/mnt/DataGuillaume/MergedData/SWR_SCALAR_PRODUCT.pickle", 'rb'))
angleall = datatosave['cosalpha']
baselineall = datatosave['baseline']
hd = pd.DataFrame()
for s in angleall.keys():
if 'hd' in list(angleall[s].keys()):
tmp1 = angleall[s]['hd'].rolling(window=100,win_type='gaussian',center=True,min_periods=1, axis = 0).mean(std=0.5)
tmp2 = baselineall[s]['hd'].rolling(window=100,win_type='gaussian',center=True,min_periods=1, axis = 0).mean(std=0.5)
tmp = (tmp1.mean(1) - tmp2.mean(1))/tmp2.mean(1)
hd[s.split("/")[1]] = tmp
nohd = pd.DataFrame()
for s in angleall.keys():
if 'nohd' in list(angleall[s].keys()):
tmp1 = angleall[s]['nohd'].rolling(window=100,win_type='gaussian',center=True,min_periods=1, axis = 0).mean(std=1)
tmp2 = baselineall[s]['nohd'].rolling(window=100,win_type='gaussian',center=True,min_periods=1, axis = 0).mean(std=1)
tmp = (tmp1.mean(1) - tmp2.mean(1))/tmp2.mean(1)
nohd[s.split("/")[1]] = tmp
data = pd.DataFrame(index = hd.index.values, columns = pd.MultiIndex.from_product([['hd', 'nohd'], ['mean', 'sem']]))
data['hd', 'mean'] = hd.mean(1)
data['hd', 'sem'] = hd.sem(1)
data['nohd', 'mean'] = nohd.mean(1)
data['nohd', 'sem'] = nohd.sem(1)
data.to_hdf("../figures/figures_articles_v4/figure2/SWR_SCALAR_PRODUCT.h5", 'w')
subplot(111)
m = hd.mean(1)
v = hd.sem(1)
plot(hd.mean(1), label = 'hd')
fill_between(hd.index.values, m+v, m-v, alpha = 0.5)
# title("Only hd")
# subplot(212)
# title("No hd")
m = nohd.mean(1)
v = nohd.sem(1)
plot(nohd.mean(1), label = 'nohd')
fill_between(nohd.index.values, m+v, m-v, alpha = 0.5)
legend()
figure()
subplot(121)
plot(hd, color = 'grey')
plot(hd.mean(1), color = 'red')
title("HD")
subplot(122)
plot(nohd, color = 'grey')
plot(nohd.mean(1), color = 'black')
title("No HD")
show()
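# --- Hedged sketch (not part of the original script) ---
# scalarProduct() above is the cosine similarity between successive
# population-rate vectors (the rows of r); quick check on toy data:
r = np.array([[1.0, 0.0], [1.0, 0.0], [0.0, 1.0]])
print(scalarProduct(r))  # [1. 0.]: identical vectors, then orthogonal ones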
| gviejo/ThalamusPhysio | python/main_make_SWS_scalar_product.py | Python | gpl-3.0 | 14,308 |
import asyncio
import glob
import importlib
import json
import logging
import os
import pkg_resources
from aiohttp import web
import dcos_installer.action_lib
import gen.calc
import pkgpanda.util
from dcos_installer import backend
from dcos_installer.config import Config, make_default_config_if_needed
from dcos_installer.constants import CONFIG_PATH, IP_DETECT_PATH, SSH_KEY_PATH, STATE_DIR
from ssh.runner import Node
log = logging.getLogger()
options = None
VERSION = '1'
ui_dist_path = os.getenv('INSTALLER_UI_PATH', pkg_resources.resource_filename(__name__, 'templates/'))
index_path = '{}index.html'.format(ui_dist_path)
assets_path = '{}assets/'.format(ui_dist_path)
# Dict containing action name to handler mappings.
action_map = {
'preflight': dcos_installer.action_lib.run_preflight,
'deploy': dcos_installer.action_lib.install_dcos,
'postflight': dcos_installer.action_lib.run_postflight,
}
remove_on_done = ['preflight', 'postflight']
# TODO(cmaloney): Kill this. Should store somewhere proper
current_action = ""
def root(request):
"""Return the root endpoint, serve the index.html.
:param request: a web requeest object.
:type request: request | None
"""
log.info("Root page requested.")
index_file = open(index_path)
log.info("Serving %s", index_path)
resp = web.Response(body=index_file.read().encode('utf-8'))
resp.headers['content-type'] = 'text/html'
return resp
def redirect_to_root(request):
"""Return the redirect from /api/v1 to /
:param request: a web request object.
:type request: request | None
"""
log.warning("/api/v{} -> redirecting -> /".format(VERSION))
return web.HTTPFound('/')
def get_version(args):
resp = web.json_response({'version': gen.calc.entry['must']['dcos_version']})
resp.headers['Content-Type'] = 'application/json'
return resp
def try_read_file(path):
if os.path.isfile(path):
return pkgpanda.util.load_string(path)
return None
def extract_external(post_data, start_key, dest_key, filename, mode) -> dict:
if start_key not in post_data:
return post_data
value = post_data[start_key]
if not value:
log.warning('Skipping write {} to {} because it looked empty.'.format(value, filename))
return post_data
log.warning('Writing {}'.format(filename))
pkgpanda.util.write_string(filename, value)
os.chmod(filename, mode)
del post_data[start_key]
post_data[dest_key] = filename
return post_data
def configure(request):
"""Return /api/v1/configure
:param request: a web request object.
:type request: request | None
"""
if request.method == 'POST':
new_config = yield from request.json()
# Save ssh_key, ip_detect as needed
# TODO(cmaloney): make ssh_key derive from ssh_key_path so we can just set ssh_key and skip all this.
new_config = extract_external(new_config, 'ssh_key', 'ssh_key_path', SSH_KEY_PATH, 0o600)
# TODO(cmaloney): change this to ip_detect_contents removing the need for the remapping.
new_config = extract_external(new_config, 'ip_detect_script', 'ip_detect_path', IP_DETECT_PATH, 0o644)
log.info('POST to configure: {}'.format(new_config))
messages = backend.create_config_from_post(new_config, CONFIG_PATH)
# Map back to DC/OS UI configuration parameters.
# TODO(cmaloney): Remove need to remap validation keys. The remapping is making things show up
# under the key of the user config chunk that caused them rather than their particular key so
# num_masters validation for instance shows up under master_list where the user would expect it.
if "ssh_key_path" in messages:
messages["ssh_key"] = messages["ssh_key_path"]
if "ip_detect_contents" in messages:
messages['ip_detect_path'] = messages['ip_detect_contents']
if 'num_masters' in messages:
messages['master_list'] = messages['num_masters']
resp = web.json_response({}, status=200)
if messages:
resp = web.json_response(messages, status=400)
return resp
elif request.method == 'GET':
config = Config(CONFIG_PATH).config
# TODO(cmaloney): should exclude the value entirely if the file doesn't exist.
config['ssh_key'] = try_read_file(SSH_KEY_PATH)
config['ip_detect_script'] = try_read_file(IP_DETECT_PATH)
resp = web.json_response(config)
resp.headers['Content-Type'] = 'application/json'
return resp
def configure_status(request):
"""Return /configure/status
:param request: a web request object.
:type request: request | None
"""
log.info("Request for configuration validation made.")
code = 200
messages = Config(CONFIG_PATH).do_validate(include_ssh=True)
if messages:
code = 400
resp = web.json_response(messages, status=code)
return resp
def configure_type(request):
"""Return /configure/type
:param request: a web request object.
:type request: request | None
"""
log.info("Request for configuration type made.")
return web.json_response(backend.determine_config_type())
def success(request):
"""Return /success
:param request: a web request object.
:type request: request | None
"""
log.info("Request for success made.")
msgs, code = backend.success(Config(CONFIG_PATH))
return web.json_response(msgs, status=code)
def unlink_state_file(action_name):
json_status_file = STATE_DIR + '/{}.json'.format(action_name)
if os.path.isfile(json_status_file):
log.debug('removing {}'.format(json_status_file))
os.unlink(json_status_file)
return True
log.debug('cannot remove {}, file not found'.format(json_status_file))
return False
def read_json_state(action_name):
json_status_file = STATE_DIR + '/{}.json'.format(action_name)
if not os.path.isfile(json_status_file):
return False
with open(json_status_file) as fh:
return json.load(fh)
def action_action_name(request):
"""Return /action/<action_name>
:param request: a web request object.
:type request: request | None
"""
global current_action
action_name = request.match_info['action_name']
# Update the global action
json_state = read_json_state(action_name)
current_action = action_name
if request.method == 'GET':
log.info('GET {}'.format(action_name))
if json_state:
return web.json_response(json_state)
return web.json_response({})
elif request.method == 'POST':
log.info('POST {}'.format(action_name))
action = action_map.get(action_name)
# If the action name is preflight, attempt to run configuration
# generation. If genconf fails, present the UI with a usable error
# for the end-user
if action_name == 'preflight':
try:
log.warning("GENERATING CONFIGURATION")
backend.do_configure()
except Exception:
genconf_failure = {
"errors": "Configuration generation failed, please see command line for details"
}
return web.json_response(genconf_failure, status=400)
params = yield from request.post()
if json_state:
if action_name == 'deploy' and 'retry' in params:
if 'hosts' in json_state:
failed_hosts = []
for deploy_host, deploy_params in json_state['hosts'].items():
if deploy_params['host_status'] != 'success':
failed_hosts.append(Node(
deploy_host, tags=deploy_params['tags'],
default_port=int(Config(CONFIG_PATH).hacky_default_get('ssh_port', 22))))
log.debug('failed hosts: {}'.format(failed_hosts))
if failed_hosts:
yield from asyncio.async(
action(
Config(CONFIG_PATH),
state_json_dir=STATE_DIR,
hosts=failed_hosts,
try_remove_stale_dcos=True,
**params))
return web.json_response({
'status': 'retried',
'details': sorted(['{}:{}'.format(node.ip, node.port) for node in failed_hosts])
})
if action_name not in remove_on_done:
return web.json_response({'status': '{} was already executed, skipping'.format(action_name)})
running = False
for host, attributes in json_state['hosts'].items():
if attributes['host_status'].lower() == 'running':
running = True
log.debug('is action running: {}'.format(running))
if running:
return web.json_response({'status': '{} is running, skipping'.format(action_name)})
else:
unlink_state_file(action_name)
yield from asyncio.async(action(Config(CONFIG_PATH), state_json_dir=STATE_DIR, options=options, **params))
return web.json_response({'status': '{} started'.format(action_name)})
def action_current(request):
"""Return the current action /action/current endpoint.
:param request: a web request object.
:type request: request | None
"""
return web.json_response({'current_action': current_action})
def logs_handler(request):
"""Return the log file on disk.
:param request: a web request object.
:type request: request | None
"""
log.info("Request for logs endpoint made.")
complete_log_path = STATE_DIR + '/complete.log'
json_files = glob.glob(STATE_DIR + '/*.json')
complete_log = []
for f in json_files:
log.debug('Adding {} to complete log file.'.format(f))
with open(f) as blob:
complete_log.append(json.loads(blob.read()))
with open(complete_log_path, 'w') as f:
f.write(json.dumps(complete_log, indent=4, sort_keys=True))
return web.HTTPFound('/download/log/complete.log')
def build_app(loop):
"""Define the aiohttp web application framework and setup the routes to be used in the API"""
global current_action
app = web.Application(loop=loop)
current_action = ''
# Disable all caching for everything, disable once the Web UI gets cache
# breaking urls for it's assets (still need to not cache the REST responses, index.html though)
# TODO(cmaloney): Python 3.6 switch this to `async def` per:
# http://aiohttp.readthedocs.io/en/stable/web.html#signals
def no_caching(request, response):
response.headers['Cache-Control'] = 'no-store, must-revalidate'
response.headers['Pragma'] = 'no-cache'
response.headers['Expires'] = '0'
app.on_response_prepare.append(no_caching)
app.router.add_route('GET', '/', root)
app.router.add_route('GET', '/api/v{}'.format(VERSION), redirect_to_root)
app.router.add_route('GET', '/api/v{}/version'.format(VERSION), get_version)
app.router.add_route('GET', '/api/v{}/configure'.format(VERSION), configure)
app.router.add_route('POST', '/api/v{}/configure'.format(VERSION), configure)
app.router.add_route('GET', '/api/v{}/configure/status'.format(VERSION), configure_status)
app.router.add_route('GET', '/api/v{}/configure/type'.format(VERSION), configure_type)
app.router.add_route('GET', '/api/v{}/success'.format(VERSION), success)
# TODO(malnick) The regex handling in the variable routes blows up if we insert another variable to be
# filled in by .format. Had to hardcode the VERSION into the URL for now. Fix suggestions please!
app.router.add_route('GET', '/api/v1/action/{action_name:preflight|postflight|deploy}', action_action_name)
app.router.add_route('POST', '/api/v1/action/{action_name:preflight|postflight|deploy}', action_action_name)
app.router.add_route('GET', '/api/v{}/action/current'.format(VERSION), action_current)
app.router.add_route('GET', '/api/v{}/logs'.format(VERSION), logs_handler)
# TODO(cmaloney): These should probably actually hard fail.
try:
# Passing an absolute path because we don't trust add_static() to resolve relative paths for us.
app.router.add_static('/assets', os.path.abspath(assets_path))
app.router.add_static('/download/log', os.path.abspath(STATE_DIR))
except ValueError as err:
log.warning(err)
# Allow overriding calculators with a `gen_extra/async_server.py` if it exists
if os.path.exists('gen_extra/async_server.py'):
mod = importlib.machinery.SourceFileLoader('gen_extra.async_server', 'gen_extra/async_server.py').load_module()
mod.extend_app(app)
return app
def start(cli_options):
global options
options = cli_options
log.debug('DC/OS Installer')
make_default_config_if_needed(CONFIG_PATH)
loop = asyncio.get_event_loop()
app = build_app(loop)
handler = app.make_handler()
f = loop.create_server(
handler,
'0.0.0.0',
cli_options.port)
srv = loop.run_until_complete(f)
log.info('Starting server {}'.format(srv.sockets[0].getsockname()))
if os.path.isdir(STATE_DIR):
for state_file in glob.glob(STATE_DIR + '/*.json'):
try:
os.unlink(state_file)
log.debug('removing {}'.format(state_file))
except FileNotFoundError:
log.error('{} not found'.format(state_file))
except PermissionError:
log.error('cannot remove {}, Permission denied'.format(state_file))
else:
os.makedirs(STATE_DIR)
assert os.path.isdir(assets_path)
assert os.path.isdir(STATE_DIR)
try:
loop.run_forever()
except KeyboardInterrupt:
srv.close()
loop.run_until_complete(handler.finish_connections(1.0))
loop.run_until_complete(app.finish())
loop.close()
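# --- Hedged sketch (not part of the original module) ---
# Standalone illustration of what extract_external() above does to a POSTed
# config: inline file contents are written to disk and the key is rewritten
# to a path. The payload and the temporary file name are illustrative.
if __name__ == '__main__':
    import tempfile
    demo = {'ssh_key': 'ssh-rsa AAAA...', 'master_list': ['10.0.0.1']}
    target = os.path.join(tempfile.mkdtemp(), 'ssh_key')
    # moves demo['ssh_key'] into the file and sets demo['ssh_key_path'] = target
    print(extract_external(demo, 'ssh_key', 'ssh_key_path', target, 0o600))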
| mellenburg/dcos | dcos_installer/async_server.py | Python | apache-2.0 | 14,276 |
def gcd(a, b):
"""
Input:
a: A nonnegative int
b: A nonnegative int
Precondition:
isinstance(a, int) and isinstance(b, int)
Output:
The greatest int that divides evenly into a and b
"""
if b == 0:
return a
else:
return gcd(b, a % b)
def lcm(num1, num2):
result = num1 * num2 // gcd(num1, num2)
return result
if __name__ == "__main__":
assert gcd(35, 21) == 7
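# --- Hedged sketch (not part of the original file) ---
# gcd and lcm are related by gcd(a, b) * lcm(a, b) == a * b for positive ints:
assert lcm(4, 6) == 12
assert gcd(4, 6) * lcm(4, 6) == 4 * 6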
| evandrix/Splat | code/demo/quixey/mine_tests/gcd.py | Python | mit | 448 |
from opentrons.types import Point
MOVE_TO_TIP_RACK_SAFETY_BUFFER = Point(0, 0, 10)
# Add in a 2mm buffer to tiprack thresholds on top of
# the max acceptable range for a given pipette based
# on calibration research data.
DEFAULT_OK_TIP_PICK_UP_VECTOR = Point(3.79, 3.64, 2.8)
P1000_OK_TIP_PICK_UP_VECTOR = Point(4.7, 4.7, 2.8)
# The tolerances below are absolute values that a pipette
# might be off due to things that cannot be controlled
# such as tip straightness or slight changes between
# tip lengths. Please review the Motion research for
# further information.
PIPETTE_TOLERANCES = {
'p1000_crosses': Point(2.7, 2.7, 0.0),
'p1000_height': Point(0.0, 0.0, 1.0),
'p300_crosses': Point(1.8, 1.8, 0.0),
'p20_crosses': Point(1.4, 1.4, 0.0),
'other_height': Point(0.0, 0.0, 0.8)
}
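# --- Hedged sketch (not part of the original module) ---
# Illustrative check of an observed tip-pick-up offset against the vectors
# above; within_threshold() is an assumed helper for illustration, not part
# of the Opentrons API.
def within_threshold(offset: Point, threshold: Point) -> bool:
    # each axis of the offset must stay inside the allowed magnitude
    return (abs(offset.x) <= threshold.x and
            abs(offset.y) <= threshold.y and
            abs(offset.z) <= threshold.z)

# e.g. within_threshold(Point(1.0, -2.0, 0.5), DEFAULT_OK_TIP_PICK_UP_VECTOR) -> True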
| Opentrons/labware | robot-server/robot_server/robot/calibration/check/constants.py | Python | apache-2.0 | 809 |
#####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire X (FoFiX) #
# Copyright (C) 2006 Sami Kyöstilä #
# 2008 myfingershurt #
# 2008 Blazingamer #
# 2008 evilynux <evilynux@gmail.com> #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
import Log
import Version
import os
import sys
import imp
import Config
import Song
from OpenGL.GL import *
from OpenGL.GLU import *
import string
import math
from Language import _
from Shader import shaders
from Task import Task
#Theme Constants.
LEFT = 0
CENTER = 1
RIGHT = 2
GUITARTYPES = [0, 1, 4]
DRUMTYPES = [2, 3]
MICTYPES = [5]
defaultDict = {}
classNames = {'setlist': lambda x: Setlist(x), 'themeLobby': lambda x: ThemeLobby(x), 'partDiff': lambda x: ThemeParts(x)}
class Theme(Task):
def __getattr__(self, attr):
try: #getting to this function is kinda slow. Set it on the first get to keep renders from lagging.
object.__getattribute__(self, '__dict__')[attr] = defaultDict[attr]
if Config.get("game", "log_undefined_gets") == 1:
Log.debug("No theme variable for %s - Loading default..." % attr)
return object.__getattribute__(self, attr)
except KeyError:
if attr in classNames.keys():
Log.warn("No theme class for %s - Loading default..." % attr)
object.__getattribute__(self, '__dict__')[attr] = classNames[attr](self)
return object.__getattribute__(self, attr)
elif attr.startswith('__') and attr.endswith('__'): #for object's attributes (eg: __hash__, __eq__)
return object.__getattribute__(self, attr)
Log.error("Attempted to load theme variable %s - no default found." % attr)
def __init__(self, path, name):
self.name = name
self.path = path
self.themePath = os.path.join(Version.dataPath(),"themes", name)
if not os.path.exists(self.themePath):
Log.warn("Theme: %s does not exist!\n" % self.themePath)
name = Config.get("coffee", "themename")
Log.notice("Theme: Attempting fallback to default theme \"%s\"." % name)
self.themePath = os.path.join(Version.dataPath(),"themes", name)
if not os.path.exists(self.themePath):
Log.error("Theme: %s does not exist!\nExiting.\n" % self.themePath)
sys.exit(1)
else:
if os.path.exists(os.path.join(self.themePath, "theme.ini")):
self.config = Config.MyConfigParser()
self.config.read(os.path.join(self.themePath, "theme.ini"))
Log.debug("theme.ini loaded")
else:
self.config = None
Log.debug("no theme.ini")
config = self.config
def get(value, type = str, default = None):
if type == "color":
if self.config:
if self.config.has_option("theme", value):
return self.hexToColor(self.config.get("theme", value))
return self.hexToColor(default)
else:
if self.config:
if self.config.has_option("theme", value):
return type(self.config.get("theme", value))
return default
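# Hedged illustration (not part of the original file): given the get() helper
# above, a theme.ini in the theme folder could override the defaults read
# below. The [theme] section name is what get() queries; the values shown are
# example choices, not shipped defaults.
#
#   [theme]
#   background_color = #1A1A1A
#   base_color = #EEEEEE
#   selected_color = #FFBF00
#   fret0_color = #22FF22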
#These colors are very important
#background_color defines what color openGL will clear too
# (the color that shows then no image is present)
#base_color is the default color of text in menus
#selected_color is the color of text when it is selected
# (like in the settings menu or when selecting a song)
self.backgroundColor = get("background_color", "color", "#000000")
self.baseColor = get("base_color", "color", "#FFFFFF")
self.selectedColor = get("selected_color", "color", "#FFBF00")
#notes that are not textured are drawn in 3 parts (Mesh, Mesh_001, Mesh_002, and occasionally Mesh_003)
#The color of mesh is set by mesh_color (on a GH note this is the black ring)
#The color of the Mesh_001 is the color of the note (green, red, yellow, etc)
#Mesh_002 is set by the hopo_color but if Mesh_003 is present it will be colored spot_color
#When Mesh_003 is present it will be colored hopo_color
self.meshColor = get("mesh_color", "color", "#000000")
self.hopoColor = get("hopo_color", "color", "#00AAAA")
self.spotColor = get("spot_color", "color", "#FFFFFF")
#keys when they are not textured are made of three parts (Mesh, Key_001, Key_002),
#two of which can be colored by the CustomTheme.py or the Theme.ini (Mesh, Mesh_002).
#These will only work if the object has a Glow_001 mesh in it, else it will render
#the whole object the color of the fret
#Key_001 is colored by key_color, Key_002 is colored by key2_color, pretty obvious, eh?
self.keyColor = get("key_color", "color", "#333333")
self.key2Color = get("key2_color", "color", "#000000")
#when a note is hit a glow will show aside from the hitflames, this has been around
#since the original Frets on Fire. What glow_color allows you to do is set it so
#the glow is either the color of the fret it's over or it can be the color the image
#actually is (if the image is white then no matter what key is hit the glow will be white)
self.glowColor = get("glow_color", str, "fret")
if not self.glowColor == "frets":
self.glowColor = self.hexToColor(self.glowColor)
#Note Colors (this applies to frets and notes)
#default is green, red, yellow, blue, orange, purple (I don't know why there's a 6th color)
default_color = ["#22FF22", "#FF2222", "#FFFF22", "#3333FF", "#FF9933", "#CC22CC"]
self.noteColors = [get("fret%d_color" % i, "color", default_color[i]) for i in range(6)]
self.spNoteColor = get("fretS_color", "color", "#4CB2E5")
self.killNoteColor = get("fretK_color", "color", "#FFFFFF")
#just like glow_color, this allows you to have tails use either the color of the note
#or the actual color of the tail
self.use_fret_colors = get("use_fret_colors", bool, False)
self.fretPress = get("fretPress", bool, False)
#Point of View (x, y, z)
self.povTarget = (get("pov_target_x", float), get("pov_target_y", float), get("pov_target_z", float))
self.povOrigin = (get("pov_origin_x", float), get("pov_origin_y", float), get("pov_origin_z", float))
#Loading phrases
self.loadingPhrase = get("loading_phrase", str, "Let's get this show on the Road_Impress the Crowd_" +
"Don't forget to strum!_Rock the house!_Jurgen is watching").split("_")
self.resultsPhrase = get("results_phrase", str, "").split("_")
#crowd_loop_delay controls how long (in milliseconds) FoFiX needs to wait before
#playing the crowd noise again in the results screen after it finishes
self.crowdLoopDelay = get("crowd_loop_delay", int)
#When a song starts up it displays the info of the song (artist, name, etc)
#positioning and the size of the font are handled by these values respectively
self.songInfoDisplayScale = get("song_info_display_scale", float, 0.0020)
self.songInfoDisplayX = get("song_info_display_X", float, 0.05)
self.songInfoDisplayY = get("song_info_display_Y", float, 0.05)
#when AI is enabled, this value controls where in the player's window
#it should say that "Jurgen is here" and how large the words need to be
self.jurgTextPos = get("jurgen_text_pos", str, "1,1,.00035").split(",")
#just a little misc option that allows you to change the name of what you
#want starpower/overdrive to be called. Some enjoy the classic Jurgen Power
#name from Hering's mod.
self.power_up_name = get("power_up_name", str, "Jurgen Power")
self.countdownPosX = get("countdown_pos_x", float, 0.5)
self.countdownPosY = get("countdown_pos_y", float, 0.45)
#These values determine the width of the neck as well as the length of it
#width seems pretty obvious but length has an advantage in that by making
#it shorter the fade away comes sooner. This is handy for unique POV because
#sometimes static HUD objects (like the lyric display) can get in the way.
self.neckWidth = get("neck_width", float, 3.0)
self.neckLength = get("neck_length", float, 9.0)
#When in the neck choosing screen, these values determine the position of the
#prompt that is usually at the top of the screen and says how to choose a neck
self.neck_prompt_x = get("menu_neck_choose_x", float, 0.1)
self.neck_prompt_y = get("menu_neck_choose_y", float, 0.05)
#Setlist
#This is really a bit of a mess but luckily most of the names are quite self
#explanatory. These values are only necessary if your theme is using the old
#default code that takes advantage of having the 4 different modes
#list, cd, list/cd hybrid, rb2
#if you're not using the default setlist display then don't bother with these values
self.songListDisplay = get("song_list_display", int, 0)
self.setlistguidebuttonsposX = get("setlistguidebuttonsposX", float, 0.408)
self.setlistguidebuttonsposY = get("setlistguidebuttonsposY", float, 0.0322)
self.setlistguidebuttonsscaleX = get("setlistguidebuttonsscaleX", float, 0.29)
self.setlistguidebuttonsscaleY = get("setlistguidebuttonsscaleY", float, 0.308)
self.setlistpreviewbuttonposX = get("setlistpreviewbuttonposX", float, 0.5)
self.setlistpreviewbuttonposY = get("setlistpreviewbuttonposY", float, 0.5)
self.setlistpreviewbuttonscaleX = get("setlistpreviewbuttonscaleX", float, 0.5)
self.setlistpreviewbuttonscaleY = get("setlistpreviewbuttonscaleY", float, 0.5)
self.songSelectSubmenuOffsetLines = get("song_select_submenu_offset_lines")
self.songSelectSubmenuOffsetSpaces = get("song_select_submenu_offset_spaces")
self.songSelectSubmenuX = get("song_select_submenu_x")
self.songSelectSubmenuY = get("song_select_submenu_y")
self.song_cd_Xpos = get("song_cd_x", float, 0.0)
self.song_cdscore_Xpos = get("song_cdscore_x", float, 0.6)
self.song_listcd_cd_Xpos = get("song_listcd_cd_x", float, .75)
self.song_listcd_cd_Ypos = get("song_listcd_cd_y", float, .6)
self.song_listcd_score_Xpos = get("song_listcd_score_x", float, .6)
self.song_listcd_score_Ypos = get("song_listcd_score_y", float, .5)
self.song_listcd_list_Xpos = get("song_listcd_list_x", float, .1)
self.song_list_Xpos = get("song_list_x", float, 0.15)
self.song_listscore_Xpos = get("song_listscore_x", float, 0.8)
self.songlist_score_colorVar = get("songlist_score_color", "color", "#93C351")
self.songlistcd_score_colorVar = get("songlistcd_score_color", "color", "#FFFFFF")
self.career_title_colorVar = get("career_title_color", "color", "#000000")
self.song_name_text_colorVar = get("song_name_text_color", "color", "#FFFFFF")
self.song_name_selected_colorVar = get("song_name_selected_color", "color", "#FFBF00")
self.artist_text_colorVar = get("artist_text_color", "color", "#4080FF")
self.artist_selected_colorVar = get("artist_selected_color", "color", "#4080FF")
self.library_text_colorVar = get("library_text_color", "color", "#FFFFFF")
self.library_selected_colorVar = get("library_selected_color", "color", "#FFBF00")
self.song_rb2_diff_colorVar = get("song_rb2_diff_color", "color", "#FFBF00")
#These determine the position of the version tag on the main menu.
#Usually it's easier to just create a 640x480 picture and position it
#in that because by default the image is position in the middle of the window
self.versiontagposX = get("versiontagposX", float, 0.5)
self.versiontagposY = get("versiontagposY", float, 0.5)
#pause menu and fail menu positions and text colors
self.pause_bkg_pos = get("pause_bkg", str, "0.5,0.5,1.0,1.0").split(",")
self.pause_text_xPos = get("pause_text_x", float)
self.pause_text_yPos = get("pause_text_y", float)
self.pause_text_colorVar = get("pause_text_color", "color", "#FFFFFF")
self.pause_selected_colorVar = get("pause_selected_color", "color", "#FFBF00")
self.fail_completed_colorVar = get("fail_completed_color", "color", "#FFFFFF")
self.fail_text_colorVar = get("fail_text_color", "color", "#FFFFFF")
self.fail_selected_colorVar = get("fail_selected_color", "color", "#FFBF00")
self.fail_bkg_pos = get("fail_bkg", str, "0.5,0.5,1.0,1.0").split(",")
self.fail_text_xPos = get("fail_text_x", float)
self.fail_text_yPos = get("fail_text_y", float)
self.fail_songname_xPos = get("fail_songname_x", float, 0.5)
self.fail_songname_yPos = get("fail_songname_y", float, 0.35)
self.opt_bkg_size = get("opt_bkg", str, "0.5,0.5,1.0,1.0").split(",")
self.opt_text_xPos = get("opt_text_x", float)
self.opt_text_yPos = get("opt_text_y", float)
self.opt_text_colorVar = get("opt_text_color", "color", "#FFFFFF")
self.opt_selected_colorVar = get("opt_selected_color", "color", "#FFBF00")
#main menu system
self.menuPos = [get("menu_x", float, 0.2), get("menu_y", float, 0.8)]
self.menuRB = get("rbmenu", bool, False)
self.main_menu_scaleVar = get("main_menu_scale", float, 0.5)
self.main_menu_vspacingVar = get("main_menu_vspacing", float, .09)
self.use_solo_submenu = get("use_solo_submenu", bool, True)
#Settings option scale
self.settingsmenuScale = get("settings_menu_scale", float, 0.002)
#loading Parameters
self.loadingX = get("loading_x", float, 0.5)
self.loadingY = get("loading_y", float, 0.6)
self.loadingFScale = get("loading_font_scale", float, 0.0015)
self.loadingRMargin = get("loading_right_margin", float, 1.0)
self.loadingLSpacing = get("loading_line_spacing", float, 1.0)
self.loadingColor = get("loading_text_color", "color", "#FFFFFF")
#this is the amount you can offset the shadow in the loading screen text
self.shadowoffsetx = get("shadowoffsetx", float, .0022)
self.shadowoffsety = get("shadowoffsety", float, .0005)
self.sub_menu_xVar = get("sub_menu_x", float, None)
self.sub_menu_yVar = get("sub_menu_y", float, None)
#self.songback = get("songback")
self.versiontag = get("versiontag", bool, False)
#these are the little help messages at the bottom of the
#options screen when you hover over an item
self.menuTipTextY = get("menu_tip_text_y", float, .7)
self.menuTipTextFont = get("menu_tip_text_font", str, "font")
self.menuTipTextScale = get("menu_tip_text_scale", float, .002)
self.menuTipTextColor = get("menu_tip_text_color", "color", "#FFFFFF")
self.menuTipTextScrollSpace = get("menu_tip_text_scroll_space", float, .25)
self.menuTipTextScrollMode = get("menu_tip_text_scroll_mode", int, 0)
self.menuTipTextDisplay = get("menu_tip_text_display", bool, False)
#Lobby
self.controlActivateX = get("control_activate_x", float, 0.645)
self.controlActivateSelectX = get("control_activate_select_x", float, 0.5)
self.controlActivatePartX = get("control_activate_part_x", float, 0.41)
self.controlActivateY = get("control_activate_y", float, 0.18)
self.controlActivateScale = get("control_activate_scale", float, 0.0018)
self.controlActivateSpace = get("control_activate_space", float, 0.045)
self.controlActivatePartSize = get("control_activate_part_size", float, 22.000)
self.controlActivateFont = get("control_activate_font", str, "font")
self.controlDescriptionX = get("control_description_x", float, 0.5)
self.controlDescriptionY = get("control_description_y", float, 0.13)
self.controlDescriptionScale = get("control_description_scale", float, 0.002)
self.controlDescriptionFont = get("control_description_font", str, "font")
self.controlCheckX = get("control_check_x", float, 0.16)
self.controlCheckY = get("control_check_y", float, 0.16) #default assumed
self.controlCheckTextY = get("control_check_text_y", float, 0.61)
self.controlCheckPartMult = get("control_check_part_mult", float, 2.8)
self.controlCheckScale = get("control_check_scale", float, 0.0018)
self.controlCheckSpace = get("control_check_space", float, 0.23)
self.controlCheckFont = get("control_check_font", str, "font")
self.lobbyMode = get("lobby_mode", int, 0)
self.lobbyPreviewX = get("lobby_preview_x", float, 0.7)
self.lobbyPreviewY = get("lobby_preview_y", float, 0.0)
self.lobbyPreviewSpacing = get("lobby_preview_spacing", float, 0.04)
self.lobbyTitleX = get("lobby_title_x", float, 0.5)
self.lobbyTitleY = get("lobby_title_y", float, 0.07)
self.lobbyTitleCharacterX = get("lobby_title_character_x", float, 0.26)
self.lobbyTitleCharacterY = get("lobby_title_character_y", float, 0.24)
self.lobbyTitleScale = get("lobby_title_scale", float, 0.0024)
self.lobbyTitleFont = get("lobby_title_font", str, "loadingFont")
self.lobbyAvatarX = get("lobby_avatar_x", float, 0.7)
self.lobbyAvatarY = get("lobby_avatar_y", float, 0.75)
self.lobbyAvatarScale = get("lobby_avatar_scale", float, 1.0)
self.lobbySelectX = get("lobby_select_x", float, 0.4)
self.lobbySelectY = get("lobby_select_y", float, 0.32)
self.lobbySelectImageX = get("lobby_select_image_x", float, 0.255)
self.lobbySelectImageY = get("lobby_select_image_y", float, 0.335)
self.lobbySelectScale = get("lobby_select_scale", float, 0.0018)
self.lobbySelectSpace = get("lobby_select_space", float, 0.04)
self.lobbySelectFont = get("lobby_select_font", str, "font")
self.lobbySelectLength = get("lobby_select_length", int, 5)
self.lobbyTitleColor = get("lobby_title_color", "color", "#FFFFFF")
self.lobbyInfoColor = get("lobby_info_color", "color", "#FFFFFF")
self.lobbyFontColor = get("lobby_font_color", "color", "#FFFFFF")
self.lobbyPlayerColor = get("lobby_player_color", "color", "#FFFFFF")
self.lobbySelectColor = get("lobby_select_color", "color", "#FFBF00")
self.lobbyDisableColor = get("lobby_disable_color", "color", "#666666")
self.characterCreateX = get("character_create_x", float, 0.25)
self.characterCreateY = get("character_create_y", float, 0.15)
self.characterCreateHelpX = get("character_create_help_x", float, 0.5)
self.characterCreateHelpY = get("character_create_help_y", float, 0.73)
self.characterCreateScale = get("character_create_scale", float, 0.0018)
self.characterCreateSpace = get("character_create_space", float, 0.045)
self.characterCreateHelpScale = get("character_create_help_scale", float, 0.0018)
self.characterCreateOptionX = get("character_create_option_x", float, 0.75)
self.characterCreateOptionFont = get("character_create_option_font", str, "font")
self.characterCreateHelpFont = get("character_create_help_font", str, "loadingFont")
self.characterCreateFontColor = get("character_create_font_color", "color", "#FFFFFF")
self.characterCreateSelectColor = get("character_create_select_color", "color", "#FFBF00")
self.characterCreateHelpColor = get("character_create_help_color", "color", "#FFFFFF")
self.avatarSelectTextX = get("avatar_select_text_x", float, 0.44)
self.avatarSelectTextY = get("avatar_select_text_y", float, 0.16)
self.avatarSelectTextScale = get("avatar_select_text_scale", float, 0.0027)
self.avatarSelectAvX = get("avatar_select_avatar_x", float, 0.667)
self.avatarSelectAvY = get("avatar_select_avatar_y", float, 0.5)
self.avatarSelectWheelY = get("avatar_select_wheel_y", float, 0.0)
self.avatarSelectFont = get("avatar_select_font", str, "font")
self.lobbyPanelAvatarDimension = (get("lobbyPanelAvatarWidth", float, 200.00),
get("lobbyPanelAvatarHeight", float, 110.00))
self.lobbyTitleText = get("lobbyTitleText", str, "Lobby")
self.lobbyTitleTextPos = (get("lobbyTitleTextX", float, .5),
get("lobbyTitleTextY", float, .1))
self.lobbyTitleTextAlign = eval(get("lobbyTitleTextAlign", str, "CENTER"))
self.lobbyTitleTextScale = get("lobbyTitleTextScale", float, .0025)
self.lobbyTitleTextFont = get("lobbyTitleTextFont", str, "font")
self.lobbySubtitleText = get("lobbySubtitleText", str, "Choose Your Character!")
self.lobbySubtitleTextPos = (get("lobbySubtitleTextX", float, .5),
get("lobbySubtitleTextY", float, .15))
self.lobbySubtitleTextScale = get("lobbySubtitleTextScale", float, .0015)
self.lobbySubtitleTextFont = get("lobbySubtitleTextFont", str, "font")
self.lobbySubtitleTextAlign = eval(get("lobbySubtitleTextAlign", str, "LEFT"))
self.lobbyOptionScale = get("lobbyOptionScale", float, .001)
self.lobbyOptionAlign = eval(get("lobbyOptionAlign", str, "CENTER"))
self.lobbyOptionFont = get("lobbyOptionFont", str, "font")
self.lobbyOptionPos = (get("lobbyOptionX", float, .5),
get("lobbyOptionY", float, .46))
self.lobbyOptionSpace = get("lobbyOptionSpace", float, .04)
self.lobbyOptionColor = get("lobbyOptionColor", "color", "#FFFFFF")
self.lobbySaveCharScale = get("lobbySaveCharScale", float, .001)
self.lobbySaveCharAlign = eval(get("lobbySaveCharAlign", str, "CENTER"))
self.lobbySaveCharFont = get("lobbySaveCharFont", str, "font")
self.lobbySaveCharColor = get("lobbySaveCharColor", "color", "#FFFFFF")
self.lobbyGameModePos = (get("lobbyGameModeX", float, .985),
get("lobbyGameModeY", float, .03))
self.lobbyGameModeScale = get("lobbyGameModeScale", float, .001)
self.lobbyGameModeAlign = eval(get("lobbyGameModeAlign", str, "RIGHT"))
self.lobbyGameModeFont = get("lobbyGameModeFont", str, "font")
self.lobbyGameModeColor = get("lobbyGameModeColor", "color", "#FFFFFF")
self.lobbyPanelNamePos = (get("lobbyPanelNameX", float, 0.0),
get("lobbyPanelNameY", float, 0.0))
self.lobbyPanelNameFont = get("lobbyPanelNameFont", str, "font")
self.lobbyPanelNameScale = get("lobbyPanelNameScale", float, .001)
self.lobbyPanelNameAlign = eval(get("lobbyPanelNameAlign", str, "LEFT"))
self.lobbyControlPos = (get("lobbyControlX", float, .5),
get("lobbyControlY", float, .375))
self.lobbyControlFont = get("lobbyControlFont", str, "font")
self.lobbyControlScale = get("lobbyControlScale", float, .0025)
self.lobbyControlAlign = eval(get("lobbyControlAlign", str, "CENTER"))
self.lobbyHeaderColor = get("lobbyHeaderColor", "color", "#FFFFFF")
self.lobbySelectLength = get("lobbySelectLength", int, 4)
self.lobbyPartScale = get("lobbyPartScale", float, .25)
self.lobbyPartPos = (get("lobbyPartX", float, .5),
get("lobbyPartY", float, .52))
self.lobbyControlImgScale = get("lobbyControlImgScale", float, .25)
self.lobbyControlImgPos = (get("lobbyControlImgX", float, .5),
get("lobbyControlImgY", float, .55))
self.lobbyKeyboardImgScale = get("lobbyKeyboardImgScale", float, .1)
self.lobbyKeyboardImgPos = (get("lobbyKeyboardImgX", float, .8),
get("lobbyKeyboardImgY", float, .95))
self.lobbySelectedColor = get("lobbySelectedColor", "color", "#FFFF66")
self.lobbyDisabledColor = get("lobbyDisabledColor", "color", "#BBBBBB")
self.lobbyPanelSize = (get("lobbyPanelWidth", float, .2),
get("lobbyPanelHeight", float, .8))
self.lobbyPanelPos = (get("lobbyPanelX", float, .04),
get("lobbyPanelY", float, .1))
self.lobbyPanelSpacing = get("lobbyPanelSpacing", float, .24)
self.partDiffTitleText = get("partDiffTitleText", str, "Select a Part and Difficulty")
self.partDiffTitleTextPos = (get("partDiffTitleTextX", float, .5),
get("partDiffTitleTextY", float, .1))
self.partDiffTitleTextAlign = eval(get("partDiffTitleTextAlign", str, "CENTER"))
self.partDiffTitleTextScale = get("partDiffTitleTextScale", float, .0025)
self.partDiffTitleTextFont = get("partDiffTitleTextFont", str, "font")
self.partDiffSubtitleText = get("partDiffSubtitleText", str, "Ready to Play!")
self.partDiffSubtitleTextPos = (get("partDiffSubtitleX", float, .5),
get("partDiffSubtitleY", float, .15))
self.partDiffSubtitleTextAlign = eval(get("partDiffSubtitleTextAlign", str, "CENTER"))
self.partDiffSubtitleTextScale = get("partDiffSubtitleTextScale", float, .0015)
self.partDiffSubtitleTextFont = get("partDiffSubtitleTextFont", str, "font")
self.partDiffOptionScale = get("partDiffOptionScale", float, .001)
self.partDiffOptionAlign = eval(get("partDiffOptionAlign", str, "CENTER"))
self.partDiffOptionFont = get("partDiffOptionFont", str, "font")
self.partDiffOptionPos = (get("partDiffOptionX", float, .5),
get("partDiffOptionY", float, .46))
self.partDiffOptionSpace = get("partDiffOptionSpace", float, .04)
self.partDiffOptionColor = get("partDiffOptionColor", "color", "#FFFFFF")
self.partDiffSelectedColor = get("partDiffSelectedColor", "color", "#FFFF66")
self.partDiffGameModeScale = get("partDiffGameModeScale", float, .001)
self.partDiffGameModeAlign = eval(get("partDiffGameModeAlign", str, "RIGHT"))
self.partDiffGameModeFont = get("partDiffGameModeFont", str, "font")
self.partDiffGameModePos = (get("partDiffGameModeX", float, .985),
get("partDiffGameModeY", float, .03))
self.partDiffGameModeColor = get("partDiffGameModeColor", "color", "#FFFFFF")
self.partDiffPanelNameScale = get("partDiffPanelNameScale", float, .001)
self.partDiffPanelNameAlign = eval(get("partDiffPanelNameAlign", str, "LEFT"))
self.partDiffPanelNameFont = get("partDiffPanelNameFont", str, "font")
self.partDiffPanelNamePos = (get("partDiffPanelNameX", float, 0.0),
get("partDiffPanelNameY", float, 0.0))
self.partDiffControlScale = get("partDiffControlScale", float, .0025)
self.partDiffControlAlign = eval(get("partDiffControlAlign", str, "CENTER"))
self.partDiffControlFont = get("partDiffControlFont", str, "font")
self.partDiffControlPos = (get("partDiffControlX", float, .5),
get("partDiffControlY", float, .375))
self.partDiffHeaderColor = get("partDiffHeaderColor", "color", "#FFFFFF")
self.partDiffPartScale = get("partDiffPartScale", float, .5)
self.partDiffPartPos = (get("partDiffPartX", float, .5),
get("partDiffpartY", float, .65))
self.partDiffKeyboardImgScale = get("partDiffKeyboardImgScale", float, .5)
self.partDiffKeyboardImgPos = (get("partDiffKeyboardImgX", float, .8),
get("partDiffKeyboardImgY", float, .95))
self.partDiffPanelSpacing = get("partDiffPanelSpacing", float, .24)
self.partDiffPanelPos = (get("partDiffPanelX", float, .8),
get("partDiffPanelY", float, .95))
self.partDiffPanelSize = (get("partDiffPanelWidth", float, .2),
get("partDiffPanelHeight", float, .8))
#Vocal mode
self.vocalMeterSize = get("vocal_meter_size", float, 45.000)
self.vocalMeterX = get("vocal_meter_x", float, .25)
self.vocalMeterY = get("vocal_meter_y", float, .8)
self.vocalMultX = get("vocal_mult_x", float, .28)
self.vocalMultY = get("vocal_mult_y", float, .8)
self.vocalPowerX = get("vocal_power_x", float, .5)
self.vocalPowerY = get("vocal_power_y", float, .8)
self.vocalFillupCenterX = get("vocal_fillup_center_x", int, 139)
self.vocalFillupCenterY = get("vocal_fillup_center_y", int, 151)
self.vocalFillupInRadius = get("vocal_fillup_in_radius", int, 25)
self.vocalFillupOutRadius = get("vocal_fillup_out_radius", int, 139)
self.vocalFillupFactor = get("vocal_fillup_factor", float, 300.000)
self.vocalFillupColor = get("vocal_fillup_color", "color", "#DFDFDE")
self.vocalCircularFillup = get("vocal_circular_fillup", bool, True)
self.vocalLaneSize = get("vocal_lane_size", float, .002)
self.vocalGlowSize = get("vocal_glow_size", float, .012)
self.vocalGlowFade = get("vocal_glow_fade", float, .6)
self.vocalLaneColor = get("vocal_lane_color", "color", "#99FF80")
self.vocalShadowColor = get("vocal_shadow_color", "color", "#CCFFBF")
self.vocalGlowColor = get("vocal_glow_color", "color", "#33FF00")
self.vocalLaneColorStar = get("vocal_lane_color_star", "color", "#FFFF80")
self.vocalShadowColorStar = get("vocal_shadow_color_star", "color", "#FFFFBF")
self.vocalGlowColorStar = get("vocal_glow_color_star", "color", "#FFFF00")
#3D Note/Fret rendering system
self.twoDnote = get("twoDnote", bool, True)
self.twoDkeys = get("twoDkeys", bool, True)
self.threeDspin = get("threeDspin", bool, False)
self.noterot = [get("noterot"+str(i+1), float, 0) for i in range(5)]
self.keyrot = [get("keyrot"+str(i+1), float, 0) for i in range(5)]
self.drumnoterot = [get("drumnoterot"+str(i+1), float, 0) for i in range(5)]
self.drumkeyrot = [get("drumkeyrot"+str(i+1), float, 0) for i in range(5)]
self.notepos = [get("notepos"+str(i+1), float, 0) for i in range(5)]
self.keypos = [get("keypos"+str(i+1), float, 0) for i in range(5)]
self.drumnotepos = [get("drumnotepos"+str(i+1), float, 0) for i in range(5)]
self.drumkeypos = [get("drumkeypos"+str(i+1), float, 0) for i in range(5)]
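#each list above holds one value per fret, read from numbered keys such as
#"noterot1" through "noterot5"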
self.shaderSolocolor = get("shaderSoloColor", "color", "#0000FF")
#In-game rendering
self.hopoIndicatorX = get("hopo_indicator_x")
self.hopoIndicatorY = get("hopo_indicator_y")
self.hopoIndicatorActiveColor = get("hopo_indicator_active_color", "color", "#FFFFFF")
self.hopoIndicatorInactiveColor = get("hopo_indicator_inactive_color", "color", "#666666")
self.markSolos = get("mark_solo_sections", int, 2)
self.ingame_stats_colorVar = get("ingame_stats_color", "color", "#FFFFFF")
#Game results scene
self.result_score = get("result_score", str, ".5,.11,0.0025,None,None").split(",")
self.result_star = get("result_star", str, ".5,.4,0.15,1.1").split(",")
self.result_song = get("result_song", str, ".05,.045,.002,None,None").split(",")
self.result_song_form = get("result_song_form", int, 0)
self.result_song_text = get("result_song_text", str, "%s Finished!").strip()
self.result_stats_part = get("result_stats_part", str, ".5,.64,0.002,None,None").split(",")
self.result_stats_part_text = get("result_stats_part_text", str, "Part: %s").strip()
self.result_stats_name = get("result_stats_name", str, ".5,.73,0.002,None,None").split(",")
self.result_stats_diff = get("result_stats_diff", str, ".5,.55,0.002,None,None").split(",")
self.result_stats_diff_text = get("result_stats_diff_text", str, "Difficulty: %s").strip()
self.result_stats_accuracy = get("result_stats_accuracy", str, ".5,.61,0.002,None,None").split(",")
self.result_stats_accuracy_text = get("result_stats_accuracy_text", str, "Accuracy: %.1f%%").strip()
self.result_stats_streak = get("result_stats_streak", str, ".5,.58,0.002,None,None").split(",")
self.result_stats_streak_text = get("result_stats_streak_text", str, "Long Streak: %s").strip()
self.result_stats_notes = get("result_stats_notes", str, ".5,.52,0.002,None,None").split(",")
self.result_stats_notes_text = get("result_stats_notes_text", str, "%s Notes Hit").strip()
self.result_cheats_info = get("result_cheats_info", str, ".5,.3,.002").split(",")
self.result_cheats_numbers = get("result_cheats_numbers", str, ".5,.35,.0015").split(",")
self.result_cheats_percent = get("result_cheats_percent", str, ".45,.4,.0015").split(",")
self.result_cheats_score = get("result_cheats_score", str, ".75,.4,.0015").split(",")
self.result_cheats_color = get("result_cheats_color", "color", "#FFFFFF")
self.result_cheats_font = get("result_cheats_font", str, "font")
self.result_high_score_font = get("result_high_score_font", str, "font")
self.result_menu_x = get("result_menu_x", float, .5)
self.result_menu_y = get("result_menu_y", float, .2)
self.result_star_type = get("result_star_type", int, 0)
#Submenus
allfiles = os.listdir(os.path.join(self.themePath,"menu"))
self.submenuScale = {}
self.submenuX = {}
self.submenuY = {}
self.submenuVSpace = {}
listmenu = []
for name in allfiles:
if name.find("text") > -1:
found = os.path.splitext(name)[0]
if found == "maintext":
continue
Config.define("theme", found, str, None)
self.submenuScale[found] = None
self.submenuX[found] = None
self.submenuY[found] = None
self.submenuVSpace[found] = None
listmenu.append(found)
for i in listmenu:
    if i == "maintext":
        continue
    #each submenu config value is an "x,y,scale,vspace" string; fill in
    #whichever fields are present
    value = get(i)
    if not value:
        continue
    fields = [f.strip() for f in value.split(",")]
    if len(fields) > 0:
        self.submenuX[i] = fields[0]
    if len(fields) > 1:
        self.submenuY[i] = fields[1]
    if len(fields) > 2:
        self.submenuScale[i] = fields[2]
    if len(fields) > 3:
        self.submenuVSpace[i] = fields[3]
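#Example (sketch): a file "menu/settingstext.png" would define a submenu
#whose layout could be given as
#   settingstext = 0.5,0.2,0.0025,0.04
#i.e. (x, y, scale, vertical spacing) in the theme config.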
def setSelectedColor(self, alpha = 1.0):
glColor4f(*(self.selectedColor + (alpha,)))
def setBaseColor(self, alpha = 1.0):
glColor4f(*(self.baseColor + (alpha,)))
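#Parse a "#RGB" or "#RRGGBB" hex string into a normalised (r, g, b) tuple,
#falling back to the theme's base color for None or non-hex input.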
def hexToColorResults(self, color):
if isinstance(color, tuple):
return color
elif color is None:
return self.baseColor
color = color.strip()
if color[0] == "#":
color = color[1:]
if len(color) == 3:
return (int(color[0], 16) / 15.0, int(color[1], 16) / 15.0, int(color[2], 16) / 15.0)
return (int(color[0:2], 16) / 255.0, int(color[2:4], 16) / 255.0, int(color[4:6], 16) / 255.0)
return self.baseColor
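#Example (sketch): Theme.hexToColor("#FFBF00") -> (1.0, ~0.749, 0.0); the
#3-digit form uses 4-bit channels ("#FB0" -> (1.0, ~0.733, 0.0)), and the
#strings "off" and "fret" return the sentinel tuples (-1,-1,-1) and
#(-2,-2,-2) that downstream color code checks for.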
@staticmethod
def hexToColor(color):
if isinstance(color, tuple):
return color
elif color is None:
return (0,0,0)
if color[0] == "#":
color = color[1:]
if len(color) == 3:
return (int(color[0], 16) / 15.0, int(color[1], 16) / 15.0, int(color[2], 16) / 15.0)
elif len(color) == 4:
return (int(color[0], 16) / 15.0, int(color[1], 16) / 15.0, int(color[2], 16) / 15.0, int(color[3], 16) / 15.0)
return (int(color[0:2], 16) / 255.0, int(color[2:4], 16) / 255.0, int(color[4:6], 16) / 255.0)
elif color.lower() == "off":
return (-1, -1, -1)
elif color.lower() == "fret":
return (-2, -2, -2)
return (0, 0, 0)
def rgbToColor(self, color):
    #convert 0-255 integer channels to normalised 0.0-1.0 floats; values
    #already in the 0-1 range pass through unchanged
    retVal = []
    for c in color:
        if isinstance(c, int) and c > 1:
            retVal.append(float(c)/255.0)
        else:
            retVal.append(float(c))
    return tuple(retVal)
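#Example (sketch): Theme.colorToHex((1.0, 0.75, 0.0)) -> "#ffbf00";
#round-trips with hexToColor up to 8-bit rounding.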
@staticmethod
def colorToHex(color):
if isinstance(color, str):
return color
return "#" + ("".join(["%02x" % int(c * 255) for c in color]))
def packTupleKey(self, key, type = str):
vals = key.split(',')
if isinstance(type, list):
retval = tuple(type[i](n.strip()) for i, n in enumerate(vals))
else:
retval = tuple(type(n.strip()) for n in vals)
return retval
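#Darken the screen with a vertical gradient (30% black at the top, 90% at
#the bottom); the alpha scales down as v goes from 0 to 1, so the fade can
#be animated away.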
def fadeScreen(self, v):
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glEnable(GL_COLOR_MATERIAL)
glBegin(GL_TRIANGLE_STRIP)
glColor4f(0, 0, 0, .3 - v * .3)
glVertex2f(0, 0)
glColor4f(0, 0, 0, .3 - v * .3)
glVertex2f(1, 0)
glColor4f(0, 0, 0, .9 - v * .9)
glVertex2f(0, 1)
glColor4f(0, 0, 0, .9 - v * .9)
glVertex2f(1, 1)
glEnd()
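#Import an optional per-theme Python module from the theme's directory,
#falling back to the stock ThemeLobby/Setlist/ThemeParts classes defined
#below when the theme does not ship a custom one.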
def loadThemeModule(self, moduleName):
try:
fp, pathname, description = imp.find_module(moduleName,[self.path])
module = imp.load_module(moduleName, fp, pathname, description)
if moduleName in ["CustomLobby", "ThemeLobby"]:
return module.CustomLobby(self)
elif moduleName in ["CustomSetlist", "Setlist"]:
return module.CustomSetlist(self)
elif moduleName in ["CustomParts", "ThemeParts"]:
return module.CustomParts(self)
else:
return None
except ImportError:
if moduleName in ["CustomLobby", "ThemeLobby"]:
return ThemeLobby(self)
elif moduleName in ["CustomSetlist", "Setlist"]:
return Setlist(self)
elif moduleName in ["CustomParts", "ThemeParts"]:
return ThemeParts(self)
else:
return None
def run(self, ticks):
pass
class ThemeLobby:
def __init__(self, theme):
self.theme = theme
self.currentImage = -1
self.nextImage = 0
self.fadeTime = 2500
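#cycle the lobby part image every 2500 ticks, skipping images that failed
#to load, and crossfade into the next one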
def run(self, ticks, lobby):
self.fadeTime += ticks
if self.fadeTime >= 2500:
self.fadeTime -= 2500
self.currentImage = (self.currentImage + 1)%4
i = self.currentImage
while not lobby.partImages[self.currentImage]:
self.currentImage = (self.currentImage + 1)%4
if i == self.currentImage:
break
if lobby.partImages[self.currentImage]:
self.nextImage = (self.currentImage + 1)%4
i = self.nextImage
while not lobby.partImages[self.nextImage]:
self.nextImage = (self.nextImage + 1)%4
if i == self.nextImage:
break
def drawPartImage(self, lobby, type, scale, coord):
if not lobby.partImages[self.currentImage]:
return
if type in GUITARTYPES:
if self.fadeTime < 1000 or self.nextImage == self.currentImage:
lobby.drawImage(lobby.partImages[self.currentImage], scale = scale, coord = coord)
else:
lobby.drawImage(lobby.partImages[self.currentImage], scale = scale, coord = coord, color = (1,1,1,((2500.0-self.fadeTime)/1500.0)))
lobby.drawImage(lobby.partImages[self.nextImage], scale = scale, coord = coord, color = (1,1,1,((self.fadeTime-1000.0)/1500.0)))
glColor4f(1,1,1,1)
elif type in DRUMTYPES:
if lobby.partImages[4]:
lobby.drawImage(lobby.partImages[4], scale = scale, coord = coord)
else:
if lobby.partImages[5]:
lobby.drawImage(lobby.partImages[5], scale = scale, coord = coord)
def renderPanels(self, lobby):
x = self.theme.lobbyPanelPos[0]
y = self.theme.lobbyPanelPos[1]
w, h = lobby.geometry
font = lobby.fontDict['font']
controlFont = lobby.fontDict[self.theme.lobbyControlFont]
panelNameFont = lobby.fontDict[self.theme.lobbyPanelNameFont]
optionFont = lobby.fontDict[self.theme.lobbyOptionFont]
wP = w*self.theme.lobbyPanelSize[0]
hP = h*self.theme.lobbyPanelSize[1]
glColor3f(*self.theme.lobbyHeaderColor)
if self.theme.lobbyTitleText:
lobby.fontDict[self.theme.lobbyTitleTextFont].render(self.theme.lobbyTitleText, self.theme.lobbyTitleTextPos, scale = self.theme.lobbyTitleTextScale, align = self.theme.lobbyTitleTextAlign)
if self.theme.lobbySubtitleText:
lobby.fontDict[self.theme.lobbySubtitleTextFont].render(self.theme.lobbySubtitleText, self.theme.lobbySubtitleTextPos, scale = self.theme.lobbySubtitleTextScale, align = self.theme.lobbySubtitleTextAlign)
lobby.fontDict[self.theme.lobbyGameModeFont].render(lobby.gameModeText, self.theme.lobbyGameModePos, scale = self.theme.lobbyGameModeScale, align = self.theme.lobbyGameModeAlign)
for i in range(4):
j = lobby.panelOrder[i]
if j in lobby.blockedPlayers or len(lobby.selectedPlayers) == lobby.maxPlayers:
glColor3f(*self.theme.lobbyDisabledColor)
else:
glColor3f(*self.theme.lobbyHeaderColor)
if i == lobby.keyControl and lobby.img_keyboard_panel:
lobby.drawImage(lobby.img_keyboard_panel, scale = (self.theme.lobbyPanelSize[0], -self.theme.lobbyPanelSize[1]), coord = (wP*.5+w*x,hP*.5+h*y), stretched = 3)
elif lobby.img_panel:
lobby.drawImage(lobby.img_panel, scale = (self.theme.lobbyPanelSize[0], -self.theme.lobbyPanelSize[1]), coord = (wP*.5+w*x,hP*.5+h*y), stretched = 3)
if i == lobby.keyControl and lobby.img_keyboard:
lobby.drawImage(lobby.img_keyboard, scale = (self.theme.lobbyKeyboardImgScale, -self.theme.lobbyKeyboardImgScale), coord = (wP*self.theme.lobbyKeyboardImgPos[0]+w*x, hP*self.theme.lobbyKeyboardImgPos[1]+h*y))
controlFont.render(lobby.controls[j], (self.theme.lobbyPanelSize[0]*self.theme.lobbyControlPos[0]+x, self.theme.lobbyPanelSize[1]*self.theme.lobbyControlPos[1]+y), scale = self.theme.lobbyControlScale, align = self.theme.lobbyControlAlign, new = True)
self.drawPartImage(lobby, lobby.types[j], scale = (self.theme.lobbyPartScale, -self.theme.lobbyPartScale), coord = (wP*self.theme.lobbyPartPos[0]+w*x, hP*self.theme.lobbyPartPos[1]+h*y))
#self.drawControlImage(lobby, lobby.types[j], scale = (self.theme.lobbyControlImgScale, -self.theme.lobbyControlImgScale), coord = (wP*self.theme.lobbyControlImgPos[0]+w*x, hP*self.theme.lobbyControlImgPos[1]+h*y))
panelNameFont.render(lobby.options[lobby.selected[j]].lower(), (x+w*self.theme.lobbyPanelNamePos[0], y+h*self.theme.lobbyPanelNamePos[1]), scale = self.theme.lobbyPanelNameScale, align = self.theme.lobbyPanelNameAlign, new = True)
for l, k in enumerate(range(lobby.pos[j][0], lobby.pos[j][1]+1)):
if k >= len(lobby.options):
break
if lobby.selected[j] == k and (j not in lobby.blockedPlayers or j in lobby.selectedPlayers):
if lobby.img_selected:
lobby.drawImage(lobby.img_selected, scale = (.5, -.5), coord = (wP*.5+w*x, hP*(.46*.75)+h*y-(h*.04*l)/.75))
if lobby.avatars[k]:
lobby.drawImage(lobby.avatars[k], scale = (lobby.avatarScale[k], -lobby.avatarScale[k]), coord = (wP*.5+w*x, hP*.7+h*y))
elif k == 0 and lobby.img_newchar_av:
lobby.drawImage(lobby.img_newchar_av, scale = (lobby.newCharAvScale, -lobby.newCharAvScale), coord = (wP*.5+w*x, hP*.7+h*y))
elif lobby.img_default_av:
lobby.drawImage(lobby.img_default_av, scale = (lobby.defaultAvScale, -lobby.defaultAvScale), coord = (wP*.5+w*x, hP*.7+h*y))
glColor3f(*self.theme.lobbySelectedColor)
elif k in lobby.blockedItems or j in lobby.blockedPlayers:
glColor3f(*self.theme.lobbyDisabledColor)
else:
glColor3f(*self.theme.lobbyOptionColor)
if k == 1:
if lobby.img_save_char:
lobby.drawImage(lobby.img_save_char, scale = (.5, -.5), coord = (wP*.5+w*x, hP*(.46*.75)+h*y-(h*.04*l)/.75))
else:
glColor3f(*self.theme.lobbySaveCharColor)
lobby.fontDict[self.theme.lobbySaveCharFont].render(lobby.options[k], (self.theme.lobbyPanelSize[0]*self.theme.lobbyOptionPos[0]+x,self.theme.lobbyPanelSize[1]*self.theme.lobbyOptionPos[1]+y+self.theme.lobbyOptionSpace*l), scale = self.theme.lobbySaveCharScale, align = self.theme.lobbySaveCharAlign, new = True)
else:
optionFont.render(lobby.options[k], (self.theme.lobbyPanelSize[0]*self.theme.lobbyOptionPos[0]+x,self.theme.lobbyPanelSize[1]*self.theme.lobbyOptionPos[1]+y+self.theme.lobbyOptionSpace*l), scale = self.theme.lobbyOptionScale, align = self.theme.lobbyOptionAlign, new = True)
x += self.theme.lobbyPanelSpacing
class ThemeParts:
def __init__(self, theme):
self.theme = theme
def run(self, ticks):
pass
def drawPartImage(self, dialog, part, scale, coord):
if part in [0, 2, 4, 5]:
if dialog.partImages[part]:
dialog.drawImage(dialog.partImages[part], scale = scale, coord = coord)
else:
if dialog.partImages[part]:
dialog.drawImage(dialog.partImages[part], scale = scale, coord = coord)
else:
if dialog.partImages[0]:
dialog.drawImage(dialog.partImages[0], scale = scale, coord = coord)
def renderPanels(self, dialog):
x = self.theme.partDiffPanelPos[0]
y = self.theme.partDiffPanelPos[1]
w, h = dialog.geometry
font = dialog.fontDict['font']
controlFont = dialog.fontDict[self.theme.partDiffControlFont]
panelNameFont = dialog.fontDict[self.theme.partDiffPanelNameFont]
optionFont = dialog.fontDict[self.theme.partDiffOptionFont]
wP = w*self.theme.partDiffPanelSize[0]
hP = h*self.theme.partDiffPanelSize[1]
glColor3f(*self.theme.partDiffHeaderColor)
if self.theme.partDiffTitleText:
dialog.fontDict[self.theme.partDiffTitleTextFont].render(self.theme.partDiffTitleText, self.theme.partDiffTitleTextPos, scale = self.theme.partDiffTitleTextScale, align = self.theme.partDiffTitleTextAlign)
if self.theme.partDiffSubtitleText:
dialog.fontDict[self.theme.partDiffSubtitleTextFont].render(self.theme.partDiffSubtitleText, self.theme.partDiffSubtitleTextPos, scale = self.theme.partDiffSubtitleTextScale, align = self.theme.partDiffSubtitleTextAlign)
for i in range(len(dialog.players)):
glColor3f(*self.theme.partDiffHeaderColor)
dialog.fontDict[self.theme.partDiffGameModeFont].render(dialog.gameModeText, self.theme.partDiffGameModePos, scale = self.theme.partDiffGameModeScale, align = self.theme.partDiffGameModeAlign)
if i == dialog.keyControl and dialog.img_keyboard_panel:
dialog.drawImage(dialog.img_keyboard_panel, scale = (self.theme.partDiffPanelSize[0], -self.theme.partDiffPanelSize[1]), coord = (wP*.5+w*x,hP*.5+h*y), stretched = 3)
elif dialog.img_panel:
dialog.drawImage(dialog.img_panel, scale = (self.theme.partDiffPanelSize[0], -self.theme.partDiffPanelSize[1]), coord = (wP*.5+w*x,hP*.5+h*y), stretched = 3)
if i == dialog.keyControl and dialog.img_keyboard:
dialog.drawImage(dialog.img_keyboard, scale = (self.theme.partDiffKeyboardImgScale, -self.theme.partDiffKeyboardImgScale), coord = (wP*self.theme.partDiffKeyboardImgPos[0]+w*x, hP*self.theme.partDiffKeyboardImgPos[1]+h*y))
controlFont.render(dialog.players[i].name, (self.theme.partDiffPanelSize[0]*self.theme.partDiffControlPos[0]+x, self.theme.partDiffPanelSize[1]*self.theme.partDiffControlPos[1]+y), scale = self.theme.partDiffControlScale, align = self.theme.partDiffControlAlign, new = True)
panelNameFont.render(dialog.players[i].name.lower(), (x+w*self.theme.partDiffPanelNamePos[0], y+h*self.theme.partDiffPanelNamePos[1]), scale = self.theme.partDiffPanelNameScale, align = self.theme.partDiffPanelNameAlign, new = True)
if dialog.mode[i] == 0:
self.drawPartImage(dialog, dialog.parts[i][dialog.selected[i]].id, scale = (self.theme.partDiffPartScale, -self.theme.partDiffPartScale), coord = (wP*self.theme.partDiffPartPos[0]+w*x, hP*self.theme.partDiffPartPos[1]+h*y))
for p in range(len(dialog.parts[i])):
if dialog.selected[i] == p:
if dialog.img_selected:
dialog.drawImage(dialog.img_selected, scale = (.5, -.5), coord = (wP*.5+w*x, hP*(.46*.75)+h*y-(h*.04*p)/.75))
glColor3f(*self.theme.partDiffSelectedColor)
else:
glColor3f(*self.theme.partDiffOptionColor)
font.render(str(dialog.parts[i][p]), (.2*.5+x,.8*.46+y+.04*p), scale = .001, align = 1, new = True)
elif dialog.mode[i] == 1:
self.drawPartImage(dialog, dialog.players[i].part.id, scale = (self.theme.partDiffPartScale, -self.theme.partDiffPartScale), coord = (wP*self.theme.partDiffPartPos[0]+w*x, hP*self.theme.partDiffPartPos[1]+h*y))
for d in range(len(dialog.info.partDifficulties[dialog.players[i].part.id])):
if dialog.selected[i] == d:
if dialog.img_selected:
dialog.drawImage(dialog.img_selected, scale = (.5, -.5), coord = (wP*.5+w*x, hP*(.46*.75)+h*y-(h*.04*d)/.75))
glColor3f(*self.theme.partDiffSelectedColor)
else:
glColor3f(*self.theme.partDiffOptionColor)
font.render(str(dialog.info.partDifficulties[dialog.players[i].part.id][d]), (.2*.5+x,.8*.46+y+.04*d), scale = .001, align = 1, new = True)
if i in dialog.readyPlayers:
if dialog.img_ready:
dialog.drawImage(dialog.img_ready, scale = (.5, -.5), coord = (wP*.5+w*x,hP*(.75*.46)+h*y))
x += .24
class Setlist:
def __init__(self, theme):
self.theme = theme
self.setlist_type = theme.songListDisplay
if self.setlist_type is None:
self.setlist_type = 1
if self.setlist_type == 0: #CD mode
self.setlistStyle = 0
self.headerSkip = 0
self.footerSkip = 0
self.labelType = 1
self.labelDistance = 2
self.showMoreLabels = True
self.texturedLabels = True
self.itemsPerPage = 1
self.showLockedSongs = False
self.showSortTiers = True
self.selectTiers = False
self.itemSize = (0,.125)
elif self.setlist_type == 1: #List mode
self.setlistStyle = 1
self.headerSkip = 2
self.footerSkip = 1
self.labelType = 0
self.labelDistance = 0
self.showMoreLabels = False
self.texturedLabels = False
self.itemsPerPage = 7
self.showLockedSongs = False
self.showSortTiers = True
self.selectTiers = False
self.itemSize = (0,.126)
elif self.setlist_type == 2: #List/CD mode
self.setlistStyle = 1
self.headerSkip = 0
self.footerSkip = 1
self.labelType = 1
self.labelDistance = 1
self.showMoreLabels = False
self.texturedLabels = True
self.itemsPerPage = 8
self.showLockedSongs = False
self.showSortTiers = True
self.selectTiers = False
self.itemSize = (0,.125)
else: #RB2 mode
self.setlistStyle = 0
self.headerSkip = 0
self.footerSkip = 0
self.labelType = 0
self.labelDistance = 1
self.showMoreLabels = False
self.texturedLabels = False
self.itemsPerPage = 12
self.showLockedSongs = True
self.showSortTiers = True
self.selectTiers = False
self.itemSize = (0,.07)
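#songListDisplay: 0 = CD, 1 = list, 2 = list/CD, 3 = RB2-style; the
#branches above expand that one setting into the style flags the render
#methods below consume.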
self.career_title_color = self.theme.career_title_colorVar
self.song_name_text_color = self.theme.song_name_text_colorVar
self.song_name_selected_color = self.theme.song_name_selected_colorVar
self.song_rb2_diff_color = self.theme.song_rb2_diff_colorVar
self.artist_text_color = self.theme.artist_text_colorVar
self.artist_selected_color = self.theme.artist_selected_colorVar
self.library_text_color = self.theme.library_text_colorVar
self.library_selected_color = self.theme.library_selected_colorVar
self.songlist_score_color = self.theme.songlist_score_colorVar
self.songlistcd_score_color = self.theme.songlistcd_score_colorVar
self.song_cd_xpos = theme.song_cd_Xpos
self.song_cdscore_xpos = theme.song_cdscore_Xpos
self.song_list_xpos = theme.song_list_Xpos
self.song_listscore_xpos = theme.song_listscore_Xpos
self.song_listcd_list_xpos = theme.song_listcd_list_Xpos
self.song_listcd_cd_xpos = theme.song_listcd_cd_Xpos
self.song_listcd_cd_ypos = theme.song_listcd_cd_Ypos
self.song_listcd_score_xpos = theme.song_listcd_score_Xpos
self.song_listcd_score_ypos = theme.song_listcd_score_Ypos
def run(self, ticks):
pass
def renderHeader(self, scene):
pass
def renderUnselectedItem(self, scene, i, n):
w, h = scene.geometry
font = scene.fontDict['songListFont']
lfont = scene.fontDict['songListFont']
sfont = scene.fontDict['shadowFont']
if self.setlist_type == 0:
return
elif self.setlist_type == 1:
if not scene.items:
return
item = scene.items[i]
glColor4f(0,0,0,1)
if isinstance(item, Song.SongInfo) or isinstance(item, Song.RandomSongInfo):
c1,c2,c3 = self.song_name_text_color
glColor3f(c1,c2,c3)
elif isinstance(item, Song.LibraryInfo):
c1,c2,c3 = self.library_text_color
glColor3f(c1,c2,c3)
elif isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
c1,c2,c3 = self.career_title_color
glColor3f(c1,c2,c3)
text = item.name
if isinstance(item, Song.SongInfo) and item.getLocked(): #TODO: SongDB
text = _("-- Locked --")
if isinstance(item, Song.SongInfo): #MFH - add indentation when tier sorting
if scene.tiersPresent:
text = " " + text
if isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
text = string.upper(text)
scale = lfont.scaleText(text, maxwidth = 0.440)
lfont.render(text, (self.song_list_xpos, .0925*(n+1)-.0375), scale = scale)
#MFH - Song list score / info display:
if isinstance(item, Song.SongInfo) and not item.getLocked():
scale = 0.0009
text = scene.scoreDifficulty.text
c1,c2,c3 = self.songlist_score_color
glColor3f(c1,c2,c3)
lfont.render(text, (self.song_listscore_xpos, .0925*(n+1)-.034), scale=scale, align = 2)
if item.frets != "":
    suffix = ", ("+item.frets+")"
else:
    suffix = ""
if item.year != "":
    yeartag = ", "+item.year
else:
    yeartag = ""
scale = .0014
c1,c2,c3 = self.artist_text_color
glColor3f(c1,c2,c3)
# evilynux - Force uppercase display for artist name
text = string.upper(item.artist)+suffix+yeartag
# evilynux - automatically scale artist name and year
scale = lfont.scaleText(text, maxwidth = 0.440, scale = scale)
if scale > .0014:
scale = .0014
lfont.render(text, (self.song_list_xpos+.05, .0925*(n+1)+.0125), scale=scale)
score = _("Nil")
stars = 0
name = ""
try:
difficulties = item.partDifficulties[scene.scorePart.id]
except KeyError:
difficulties = []
for d in difficulties:
if d.id == scene.scoreDifficulty.id:
scores = item.getHighscores(d, part = scene.scorePart)
if scores:
score, stars, name, scoreExt = scores[0]
try:
notesHit, notesTotal, noteStreak, modVersion, handicap, handicapLong, originalScore = scoreExt
except ValueError:
notesHit, notesTotal, noteStreak, modVersion, oldScores1, oldScores2 = scoreExt
handicap = 0
handicapLong = "None"
originalScore = score
else:
score, stars, name = 0, 0, "---"
if score == _("Nil") and scene.nilShowNextScore: #MFH
for d in difficulties: #MFH - just take the first valid difficulty you can find and display it.
scores = item.getHighscores(d, part = scene.scorePart)
if scores:
score, stars, name, scoreExt = scores[0]
try:
notesHit, notesTotal, noteStreak, modVersion, handicap, handicapLong, originalScore = scoreExt
except ValueError:
notesHit, notesTotal, noteStreak, modVersion, oldScores1, oldScores2 = scoreExt
handicap = 0
handicapLong = "None"
originalScore = score
break
else:
score, stars, name = 0, 0, "---"
else:
score, stars, name = _("Nil"), 0, "---"
starx = self.song_listscore_xpos+.01
stary = .0925*(n+1)-0.039
starscale = 0.03
stary = 1.0 - (stary / scene.fontScreenBottom)
scene.drawStarScore(w, h, starx, stary - h/2, stars, starscale, horiz_spacing = 1.0, hqStar = True) #MFH
scale = 0.0014
# evilynux - score color
c1,c2,c3 = self.songlist_score_color
glColor3f(c1,c2,c3)
# evilynux - hit% and note streak only if enabled
if score is not _("Nil") and score > 0 and notesTotal != 0:
text = "%.1f%% (%d)" % ((float(notesHit) / notesTotal) * 100.0, noteStreak)
lfont.render(text, (self.song_listscore_xpos+.1, .0925*(n+1)-.015), scale=scale, align = 2)
text = str(score)
lfont.render(text, (self.song_listscore_xpos+.1, .0925*(n+1)+.0125), scale=scale*1.28, align = 2)
elif self.setlist_type == 2: #old list/cd
y = h*(.87-(.1*n))
if not scene.items:
return
item = scene.items[i]
if isinstance(item, Song.SongInfo) or isinstance(item, Song.RandomSongInfo):
c1,c2,c3 = self.song_name_text_color
glColor4f(c1,c2,c3,1)
if isinstance(item, Song.LibraryInfo):
c1,c2,c3 = self.library_text_color
glColor4f(c1,c2,c3,1)
if isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
c1,c2,c3 = self.career_title_color
glColor4f(c1,c2,c3,1)
text = item.name
if isinstance(item, Song.SongInfo) and item.getLocked():
text = _("-- Locked --")
if isinstance(item, Song.SongInfo): #MFH - add indentation when tier sorting
if scene.tiersPresent:
text = " " + text
if isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
text = string.upper(text)
scale = font.scaleText(text, maxwidth = 0.45)
font.render(text, (self.song_listcd_list_xpos, .09*(n+1)), scale = scale)
if isinstance(item, Song.SongInfo) and not item.getLocked():
if item.frets != "":
    suffix = ", ("+item.frets+")"
else:
    suffix = ""
if item.year != "":
    yeartag = ", "+item.year
else:
    yeartag = ""
scale = .0014
c1,c2,c3 = self.artist_text_color
glColor4f(c1,c2,c3,1)
text = string.upper(item.artist)+suffix+yeartag
scale = font.scaleText(text, maxwidth = 0.4, scale = scale)
font.render(text, (self.song_listcd_list_xpos + .05, .09*(n+1)+.05), scale=scale)
elif self.setlist_type == 3: #old rb2
font = scene.fontDict['songListFont']
if not scene.items or scene.itemIcons is None:
return
item = scene.items[i]
y = h*(.7825-(.0459*(n+1)))
if scene.img_tier:
imgwidth = scene.img_tier.width1()
imgheight = scene.img_tier.height1()
wfactor = 381.1/imgwidth
hfactor = 24.000/imgheight
if (isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo)) and scene.img_tier:
scene.drawImage(scene.img_tier, scale = (wfactor,-hfactor), coord = (w/1.587, h-((0.055*h)*(n+1))-(0.219*h)))
icon = None
if isinstance(item, Song.SongInfo):
if item.icon != "":
try:
icon = scene.itemIcons[item.icon]
imgwidth = icon.width1()
wfactor = 23.000/imgwidth
scene.drawImage(icon, scale = (wfactor,-wfactor), coord = (w/2.86, h-((0.055*h)*(n+1))-(0.219*h)))
except KeyError:
pass
elif isinstance(item, Song.LibraryInfo):
try:
icon = scene.itemIcons["Library"]
imgwidth = icon.width1()
wfactor = 23.000/imgwidth
scene.drawImage(icon, scale = (wfactor,-wfactor), coord = (w/2.86, h-((0.055*h)*(n+1))-(0.219*h)))
except KeyError:
pass
elif isinstance(item, Song.RandomSongInfo):
try:
icon = scene.itemIcons["Random"]
imgwidth = icon.width1()
wfactor = 23.000/imgwidth
scene.drawImage(icon, scale = (wfactor,-wfactor), coord = (w/2.86, h-((0.055*h)*(n+1))-(0.219*h)))
except KeyError:
pass
if isinstance(item, Song.SongInfo) or isinstance(item, Song.LibraryInfo):
c1,c2,c3 = self.song_name_text_color
glColor4f(c1,c2,c3,1)
elif isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
c1,c2,c3 = self.career_title_color
glColor4f(c1,c2,c3,1)
elif isinstance(item, Song.RandomSongInfo):
c1,c2,c3 = self.song_name_text_color
glColor4f(c1,c2,c3,1)
text = item.name
if isinstance(item, Song.SongInfo) and item.getLocked():
text = _("-- Locked --")
if isinstance(item, Song.SongInfo): #MFH - add indentation when tier sorting
if scene.tiersPresent or icon:
text = " " + text
# evilynux - Force uppercase display for Career titles
maxwidth = .55
if isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
text = string.upper(text)
scale = .0015
wt, ht = font.getStringSize(text, scale = scale)
while wt > maxwidth:
tlength = len(text) - 4
text = text[:tlength] + "..."
wt, ht = font.getStringSize(text, scale = scale)
if wt < .45:
break
font.render(text, (.35, .0413*(n+1)+.15), scale = scale)
if isinstance(item, Song.SongInfo):
score = _("Nil")
stars = 0
name = ""
if not item.getLocked():
try:
difficulties = item.partDifficulties[scene.scorePart.id]
except KeyError:
difficulties = []
for d in difficulties:
if d.id == scene.scoreDifficulty.id:
scores = item.getHighscores(d, part = scene.scorePart)
if scores:
score, stars, name, scoreExt = scores[0]
try:
notesHit, notesTotal, noteStreak, modVersion, handicap, handicapLong, originalScore = scoreExt
except ValueError:
notesHit, notesTotal, noteStreak, modVersion, oldScores1, oldScores2 = scoreExt
handicap = 0
handicapLong = "None"
originalScore = score
break
else:
score, stars, name = 0, 0, "---"
if score == _("Nil") and scene.nilShowNextScore: #MFH
for d in difficulties: #MFH - just take the first valid difficulty you can find and display it.
scores = item.getHighscores(d, part = scene.scorePart)
if scores:
score, stars, name, scoreExt = scores[0]
try:
notesHit, notesTotal, noteStreak, modVersion, handicap, handicapLong, originalScore = scoreExt
except ValueError:
notesHit, notesTotal, noteStreak, modVersion, oldScores1, oldScores2 = scoreExt
handicap = 0
handicapLong = "None"
originalScore = score
break
else:
score, stars, name = 0, 0, "---"
else:
score, stars, name = _("Nil"), 0, "---"
#evilynux - hit% and note streak if enabled
scale = 0.0009
if score is not _("Nil") and score > 0 and notesTotal != 0:
text = "%.1f%% (%d)" % ((float(notesHit) / notesTotal) * 100.0, noteStreak)
font.render(text, (.92, .0413*(n+1)+.163), scale=scale, align = 2)
text = str(score)
font.render(text, (.92, .0413*(n+1)+.15), scale=scale, align = 2)
def renderSelectedItem(self, scene, n):
w, h = scene.geometry
font = scene.fontDict['songListFont']
lfont = scene.fontDict['songListFont']
sfont = scene.fontDict['shadowFont']
item = scene.selectedItem
if not item:
return
if isinstance(item, Song.BlankSpaceInfo):
return
if self.setlist_type == 0:
return
elif self.setlist_type == 1:
y = h*(.88-(.125*n))
if scene.img_item_select:
wfactor = scene.img_item_select.widthf(pixelw = 635.000)
scene.drawImage(scene.img_item_select, scale = (wfactor,-wfactor), coord = (w/2.1, y))
glColor4f(0,0,0,1)
if isinstance(item, Song.SongInfo) or isinstance(item, Song.RandomSongInfo):
c1,c2,c3 = self.song_name_selected_color
glColor3f(c1,c2,c3)
elif isinstance(item, Song.LibraryInfo):
c1,c2,c3 = self.library_selected_color
glColor3f(c1,c2,c3)
elif isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
c1,c2,c3 = self.career_title_color
glColor3f(c1,c2,c3)
text = item.name
if isinstance(item, Song.SongInfo) and item.getLocked(): #TODO: SongDB
text = _("-- Locked --")
if isinstance(item, Song.SongInfo): #MFH - add indentation when tier sorting
if scene.tiersPresent:
text = " " + text
if isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
text = string.upper(text)
scale = sfont.scaleText(text, maxwidth = 0.440)
sfont.render(text, (self.song_list_xpos, .0925*(n+1)-.0375), scale = scale)
#MFH - Song list score / info display:
if isinstance(item, Song.SongInfo) and not item.getLocked():
scale = 0.0009
text = scene.scoreDifficulty.text
c1,c2,c3 = self.songlist_score_color
glColor3f(c1,c2,c3)
lfont.render(text, (self.song_listscore_xpos, .0925*(n+1)-.034), scale=scale, align = 2)
if item.frets != "":
    suffix = ", ("+item.frets+")"
else:
    suffix = ""
if item.year != "":
    yeartag = ", "+item.year
else:
    yeartag = ""
scale = .0014
c1,c2,c3 = self.artist_selected_color
glColor3f(c1,c2,c3)
# evilynux - Force uppercase display for artist name
text = string.upper(item.artist)+suffix+yeartag
# evilynux - automatically scale artist name and year
scale = lfont.scaleText(text, maxwidth = 0.440, scale = scale)
if scale > .0014:
scale = .0014
lfont.render(text, (self.song_list_xpos+.05, .0925*(n+1)+.0125), scale=scale)
score = _("Nil")
stars = 0
name = ""
try:
difficulties = item.partDifficulties[scene.scorePart.id]
except KeyError:
difficulties = []
for d in difficulties:
if d.id == scene.scoreDifficulty.id:
scores = item.getHighscores(d, part = scene.scorePart)
if scores:
score, stars, name, scoreExt = scores[0]
try:
notesHit, notesTotal, noteStreak, modVersion, handicap, handicapLong, originalScore = scoreExt
except ValueError:
notesHit, notesTotal, noteStreak, modVersion, oldScores1, oldScores2 = scoreExt
handicap = 0
handicapLong = "None"
originalScore = score
else:
score, stars, name = 0, 0, "---"
if score == _("Nil") and scene.nilShowNextScore: #MFH
for d in difficulties: #MFH - just take the first valid difficulty you can find and display it.
scores = item.getHighscores(d, part = scene.scorePart)
if scores:
score, stars, name, scoreExt = scores[0]
try:
notesHit, notesTotal, noteStreak, modVersion, handicap, handicapLong, originalScore = scoreExt
except ValueError:
notesHit, notesTotal, noteStreak, modVersion, oldScores1, oldScores2 = scoreExt
handicap = 0
handicapLong = "None"
originalScore = score
break
else:
score, stars, name = 0, 0, "---"
else:
score, stars, name = _("Nil"), 0, "---"
starx = self.song_listscore_xpos+.01
stary = .0925*(n+1)-0.039
starscale = 0.03
stary = 1.0 - (stary / scene.fontScreenBottom)
scene.drawStarScore(w, h, starx, stary - h/2, stars, starscale, horiz_spacing = 1.0, hqStar = True) #MFH
scale = 0.0014
# evilynux - score color
c1,c2,c3 = self.songlist_score_color
glColor3f(c1,c2,c3)
# evilynux - hit% and note streak only if enabled
if score is not _("Nil") and score > 0 and notesTotal != 0:
text = "%.1f%% (%d)" % ((float(notesHit) / notesTotal) * 100.0, noteStreak)
lfont.render(text, (self.song_listscore_xpos+.1, .0925*(n+1)-.015), scale=scale, align = 2)
text = str(score)
lfont.render(text, (self.song_listscore_xpos+.1, .0925*(n+1)+.0125), scale=scale*1.28, align = 2)
elif self.setlist_type == 2:
y = h*(.87-(.1*n))
glColor4f(1,1,1,1)
if scene.img_selected:
imgwidth = scene.img_selected.width1()
scene.drawImage(scene.img_selected, scale = (1, -1), coord = (self.song_listcd_list_xpos * w + (imgwidth*.64/2), y*1.2-h*.215))
text = scene.library
font.render(text, (.05, .01))
if scene.songLoader:
font.render(_("Loading Preview..."), (.05, .7), scale = 0.001)
if isinstance(item, Song.SongInfo):
c1,c2,c3 = self.song_name_selected_color
glColor4f(c1,c2,c3,1)
if item.getLocked():
text = item.getUnlockText()
elif scene.careerMode and not item.completed:
text = _("Play To Advance")
elif scene.practiceMode:
text = _("Practice")
elif item.count:
count = int(item.count)
if count == 1:
text = _("Played Once")
else:
text = _("Played %d times.") % count
else:
text = _("Quickplay")
elif isinstance(item, Song.LibraryInfo):
c1,c2,c3 = self.library_selected_color
glColor4f(c1,c2,c3,1)
if item.songCount == 1:
text = _("There Is 1 Song In This Setlist.")
elif item.songCount > 1:
text = _("There Are %d Songs In This Setlist.") % (item.songCount)
else:
text = ""
elif isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
text = _("Tier")
c1,c2,c3 = self.career_title_color
glColor4f(c1,c2,c3,1)
elif isinstance(item, Song.RandomSongInfo):
text = _("Random Song")
c1,c2,c3 = self.song_name_selected_color
glColor4f(c1,c2,c3,1)
font.render(text, (self.song_listcd_score_xpos, .085), scale = 0.0012)
if isinstance(item, Song.SongInfo) or isinstance(item, Song.RandomSongInfo):
c1,c2,c3 = self.song_name_selected_color
glColor4f(c1,c2,c3,1)
elif isinstance(item, Song.LibraryInfo):
c1,c2,c3 = self.library_selected_color
glColor4f(c1,c2,c3,1)
elif isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
c1,c2,c3 = self.career_title_color
glColor4f(c1,c2,c3,1)
text = item.name
if isinstance(item, Song.SongInfo) and item.getLocked():
text = _("-- Locked --")
if isinstance(item, Song.SongInfo): #MFH - add indentation when tier sorting
if scene.tiersPresent:
text = " " + text
elif isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
text = string.upper(text)
scale = font.scaleText(text, maxwidth = 0.45)
font.render(text, (self.song_listcd_list_xpos, .09*(n+1)), scale = scale)
if isinstance(item, Song.SongInfo) and not item.getLocked():
if item.frets != "":
    suffix = ", ("+item.frets+")"
else:
    suffix = ""
if item.year != "":
    yeartag = ", "+item.year
else:
    yeartag = ""
scale = .0014
c1,c2,c3 = self.artist_selected_color
glColor4f(c1,c2,c3,1)
text = string.upper(item.artist)+suffix+yeartag
scale = font.scaleText(text, maxwidth = 0.4, scale = scale)
font.render(text, (self.song_listcd_list_xpos + .05, .09*(n+1)+.05), scale=scale)
elif self.setlist_type == 3:
y = h*(.7825-(.0459*(n)))
if scene.img_tier:
imgwidth = scene.img_tier.width1()
imgheight = scene.img_tier.height1()
wfactor = 381.1/imgwidth
hfactor = 24.000/imgheight
if isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
scene.drawImage(scene.img_tier, scale = (wfactor,-hfactor), coord = (w/1.587, h-((0.055*h)*(n+1))-(0.219*h)))
if scene.img_selected:
imgwidth = scene.img_selected.width1()
imgheight = scene.img_selected.height1()
wfactor = 381.5/imgwidth
hfactor = 36.000/imgheight
scene.drawImage(scene.img_selected, scale = (wfactor,-hfactor), coord = (w/1.587, y*1.2-h*.213))
icon = None
if isinstance(item, Song.SongInfo):
if item.icon != "":
try:
icon = scene.itemIcons[item.icon]
imgwidth = icon.width1()
wfactor = 23.000/imgwidth
scene.drawImage(icon, scale = (wfactor,-wfactor), coord = (w/2.86, h-((0.055*h)*(n+1))-(0.219*h)))
except KeyError:
pass
c1,c2,c3 = self.song_name_selected_color
glColor3f(c1,c2,c3)
if item.getLocked():
text = item.getUnlockText()
elif scene.careerMode and not item.completed:
text = _("Play To Advance")
elif scene.practiceMode:
text = _("Practice")
elif item.count:
count = int(item.count)
if count == 1:
text = _("Played Once")
else:
text = _("Played %d times.") % count
else:
text = _("Quickplay")
elif isinstance(item, Song.LibraryInfo):
try:
icon = scene.itemIcons["Library"]
imgwidth = icon.width1()
wfactor = 23.000/imgwidth
scene.drawImage(icon, scale = (wfactor,-wfactor), coord = (w/2.86, h-((0.055*h)*(n+1))-(0.219*h)))
except KeyError:
pass
c1,c2,c3 = self.library_selected_color
glColor3f(c1,c2,c3)
if item.songCount == 1:
text = _("There Is 1 Song In This Setlist.")
elif item.songCount > 1:
text = _("There Are %d Songs In This Setlist.") % (item.songCount)
else:
text = ""
elif isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
text = _("Tier")
c1,c2,c3 = self.career_title_color
glColor3f(c1,c2,c3)
elif isinstance(item, Song.RandomSongInfo):
try:
icon = scene.itemIcons["Random"]
imgwidth = icon.width1()
wfactor = 23.000/imgwidth
scene.drawImage(icon, scale = (wfactor,-wfactor), coord = (w/2.86, h-((0.055*h)*(n+1))-(0.219*h)))
except KeyError:
pass
text = _("Random Song")
c1,c2,c3 = self.career_title_color
glColor3f(c1,c2,c3)
font.render(text, (0.92, .13), scale = 0.0012, align = 2)
maxwidth = .45
if isinstance(item, Song.SongInfo) or isinstance(item, Song.LibraryInfo) or isinstance(item, Song.RandomSongInfo):
c1,c2,c3 = self.song_name_selected_color
glColor4f(c1,c2,c3,1)
if isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
c1,c2,c3 = self.career_title_color
glColor4f(c1,c2,c3,1)
text = item.name
if isinstance(item, Song.SongInfo) and item.getLocked():
text = _("-- Locked --")
if isinstance(item, Song.SongInfo): #MFH - add indentation when tier sorting
if scene.tiersPresent or icon:
text = " " + text
# evilynux - Force uppercase display for Career titles
if isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
maxwidth = .55
text = string.upper(text)
scale = .0015
wt, ht = font.getStringSize(text, scale = scale)
while wt > maxwidth:
tlength = len(text) - 4
text = text[:tlength] + "..."
wt, ht = font.getStringSize(text, scale = scale)
if wt < .45:
break
font.render(text, (.35, .0413*(n+1)+.15), scale = scale) #add theme option for song_listCD_xpos
if isinstance(item, Song.SongInfo):
score = _("Nil")
stars = 0
name = ""
if not item.getLocked():
try:
difficulties = item.partDifficulties[scene.scorePart.id]
except KeyError:
difficulties = []
for d in difficulties:
if d.id == scene.scoreDifficulty.id:
scores = item.getHighscores(d, part = scene.scorePart)
if scores:
score, stars, name, scoreExt = scores[0]
try:
notesHit, notesTotal, noteStreak, modVersion, handicap, handicapLong, originalScore = scoreExt
except ValueError:
notesHit, notesTotal, noteStreak, modVersion, oldScores1, oldScores2 = scoreExt
handicap = 0
handicapLong = "None"
originalScore = score
break
else:
score, stars, name = 0, 0, "---"
if score == _("Nil") and scene.nilShowNextScore: #MFH
for d in difficulties: #MFH - just take the first valid difficulty you can find and display it.
scores = item.getHighscores(d, part = scene.scorePart)
if scores:
score, stars, name, scoreExt = scores[0]
try:
notesHit, notesTotal, noteStreak, modVersion, handicap, handicapLong, originalScore = scoreExt
except ValueError:
notesHit, notesTotal, noteStreak, modVersion, oldScores1, oldScores2 = scoreExt
handicap = 0
handicapLong = "None"
originalScore = score
break
else:
score, stars, name = 0, 0, "---"
else:
score, stars, name = _("Nil"), 0, "---"
scale = 0.0009
if score is not _("Nil") and score > 0 and notesTotal != 0:
text = "%.1f%% (%d)" % ((float(notesHit) / notesTotal) * 100.0, noteStreak)
wt, ht = font.getStringSize(text, scale=scale) #avoid clobbering the geometry w, h
font.render(text, (.92, .0413*(n+1)+.163), scale=scale, align = 2)
text = str(score)
font.render(text, (.92, .0413*(n+1)+.15), scale=scale, align = 2)
def renderItem(self, scene, color, label):
if not scene.itemMesh:
return
if color:
glColor3f(*color)
glEnable(GL_COLOR_MATERIAL)
if self.setlist_type == 2:
glRotate(90, 0, 0, 1)
glRotate(((scene.time - scene.lastTime) * 2 % 360) - 90, 1, 0, 0)
scene.itemMesh.render("Mesh_001")
glColor3f(.1, .1, .1)
scene.itemMesh.render("Mesh")
if label and scene.label:
glEnable(GL_TEXTURE_2D)
label.bind()
glColor3f(1, 1, 1)
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
scene.label.render("Mesh_001")
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
if shaders.enable("cd"):
scene.itemMesh.render("Mesh_001")
shaders.disable()
def renderLibrary(self, scene, color, label):
if not scene.libraryMesh:
return
if color:
glColor3f(*color)
glEnable(GL_NORMALIZE)
glEnable(GL_COLOR_MATERIAL)
if self.setlist_type == 2:
glRotate(-180, 0, 1, 0)
glRotate(-90, 0, 0, 1)
glRotate(((scene.time - scene.lastTime) * 4 % 360) - 90, 1, 0, 0)
scene.libraryMesh.render("Mesh_001")
glColor3f(.1, .1, .1)
scene.libraryMesh.render("Mesh")
# Draw the label if there is one
if label and scene.libraryLabel:
glEnable(GL_TEXTURE_2D)
label.bind()
glColor3f(1, 1, 1)
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
scene.libraryLabel.render()
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
glDisable(GL_NORMALIZE)
def renderTitle(self, scene, color, label):
if not scene.tierMesh:
return
if color:
glColor3f(*color)
glEnable(GL_NORMALIZE)
glEnable(GL_COLOR_MATERIAL)
scene.tierMesh.render("Mesh_001")
glColor3f(.1, .1, .1)
scene.tierMesh.render("Mesh")
# Draw the label if there is one
if label:
glEnable(GL_TEXTURE_2D)
label.bind()
glColor3f(1, 1, 1)
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
scene.libraryLabel.render()
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
glDisable(GL_NORMALIZE)
def renderRandom(self, scene, color, label):
if not scene.itemMesh:
return
if color:
glColor3f(*color)
glEnable(GL_NORMALIZE)
glEnable(GL_COLOR_MATERIAL)
scene.itemMesh.render("Mesh_001")
glColor3f(.1, .1, .1)
scene.itemMesh.render("Mesh")
# Draw the label if there is one
if label:
glEnable(GL_TEXTURE_2D)
label.bind()
glColor3f(1, 1, 1)
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
scene.libraryLabel.render()
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
glDisable(GL_NORMALIZE)
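#Album art: CD mode (setlist_type 0) renders the full 3D stack of cases,
#list/CD mode (type 2) renders only the selected item, and plain list mode
#(type 1) draws no art here.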
def renderAlbumArt(self, scene):
if not scene.itemLabels:
return
if self.setlist_type == 0:
w, h = scene.geometry
try:
glMatrixMode(GL_PROJECTION)
glPushMatrix()
glLoadIdentity()
gluPerspective(60, scene.aspectRatio, 0.1, 1000)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
glEnable(GL_DEPTH_TEST)
glDisable(GL_CULL_FACE)
glDepthMask(1)
offset = 0
if scene.time < 40:
offset = 10*((40 - scene.time)/40.0)**4
scene.camera.origin = (-10 + offset, -scene.cameraOffset, 4 - self.song_cd_xpos + offset)
scene.camera.target = ( 0 + offset, -scene.cameraOffset, 2.5 - self.song_cd_xpos + offset)
scene.camera.apply()
y = 0.0
for i, item in enumerate(scene.items):
c = math.sin(scene.itemRenderAngles[i] * math.pi / 180)
if isinstance(item, Song.SongInfo):
h = c * 4.0 + (1 - c) * .8
elif isinstance(item, Song.LibraryInfo):
h = c * 4.0 + (1 - c) * 1.2
elif isinstance(item, Song.TitleInfo) or isinstance(item, Song.SortTitleInfo):
h = c * 4.0 + (1 - c) * 2.4
elif isinstance(item, Song.RandomSongInfo):
h = c * 4.0 + (1 - c) * .8
else:
continue
d = (y + h * .5 + scene.camera.origin[1]) / (4 * (scene.camera.target[2] - scene.camera.origin[2]))
if i == scene.selectedIndex:
scene.selectedOffset = y + h / 2
self.theme.setSelectedColor()
else:
self.theme.setBaseColor()
glTranslatef(0, -h / 2, 0)
glPushMatrix()
if abs(d) < 1.2:
label = scene.itemLabels[i]
if label == "Random":
label = scene.img_random_label
if label == False:
label = scene.img_empty_label
if isinstance(item, Song.SongInfo):
glRotate(scene.itemRenderAngles[i], 0, 0, 1)
self.renderItem(scene, item.cassetteColor, label)
elif isinstance(item, Song.LibraryInfo):
#myfingershurt: cd cases are backwards
glRotate(-scene.itemRenderAngles[i], 0, 1, 0) #spin 90 degrees around y axis
glRotate(-scene.itemRenderAngles[i], 0, 1, 0) #spin 90 degrees around y axis again, now case is corrected
glRotate(-scene.itemRenderAngles[i], 0, 0, 1) #bring cd case up for viewing
if i == scene.selectedIndex:
glRotate(((scene.time - scene.lastTime) * 4 % 360) - 90, 1, 0, 0)
self.renderLibrary(scene, item.color, label)
elif isinstance(item, Song.TitleInfo):
#myfingershurt: cd cases are backwards
glRotate(-scene.itemRenderAngles[i], 0, 0.5, 0) #spin 90 degrees around y axis
glRotate(-scene.itemRenderAngles[i], 0, 0.5, 0) #spin 90 degrees around y axis again, now case is corrected
glRotate(-scene.itemRenderAngles[i], 0, 0, 0.5) #bring cd case up for viewing
if i == scene.selectedIndex:
glRotate(((scene.time - scene.lastTime) * 4 % 360) - 90, 1, 0, 0)
self.renderTitle(scene, item.color, label)
elif isinstance(item, Song.RandomSongInfo):
#myfingershurt: cd cases are backwards
glRotate(scene.itemRenderAngles[i], 0, 0, 1)
self.renderRandom(scene, item.color, label)
glPopMatrix()
glTranslatef(0, -h/2, 0)
y+= h
glDisable(GL_DEPTH_TEST)
glDisable(GL_CULL_FACE)
glDepthMask(0)
finally:
glMatrixMode(GL_PROJECTION)
glPopMatrix()
glMatrixMode(GL_MODELVIEW)
elif self.setlist_type == 1:
return
elif self.setlist_type == 2:
w, h = scene.geometry
try:
glMatrixMode(GL_PROJECTION)
glPushMatrix()
glLoadIdentity()
gluPerspective(60, scene.aspectRatio, 0.1, 1000)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
glEnable(GL_DEPTH_TEST)
glDisable(GL_CULL_FACE)
glDepthMask(1)
offset = 0
if scene.time < 40:
offset = 10*((40 - scene.time)/40.0)**4
scene.camera.origin = (-9,(5.196/scene.aspectRatio) - (5.196*2/scene.aspectRatio)*self.song_listcd_cd_ypos,(5.196*scene.aspectRatio)-(5.196*2*scene.aspectRatio)*self.song_listcd_cd_xpos)
scene.camera.target = ( 0,(5.196/scene.aspectRatio) - (5.196*2/scene.aspectRatio)*self.song_listcd_cd_ypos,(5.196*scene.aspectRatio)-(5.196*2*scene.aspectRatio)*self.song_listcd_cd_xpos)
scene.camera.apply()
y = 0.0
glPushMatrix()
item = scene.selectedItem
i = scene.selectedIndex
label = scene.itemLabels[i]
if label == "Random":
label = scene.img_random_label
if not label:
label = scene.img_empty_label
if isinstance(item, Song.SongInfo):
if scene.labelType:
self.renderItem(scene, item.cassetteColor, label)
else:
self.renderLibrary(scene, item.cassetteColor, label)
elif isinstance(item, Song.LibraryInfo):
self.renderLibrary(scene, item.color, label)
elif isinstance(item, Song.RandomSongInfo):
if scene.labelType:
self.renderItem(scene, None, label)
else:
self.renderLibrary(scene, None, label)
glPopMatrix()
glTranslatef(0, -h / 2, 0)
y += h
glDisable(GL_DEPTH_TEST)
glDisable(GL_CULL_FACE)
glDepthMask(0)
finally:
glMatrixMode(GL_PROJECTION)
glPopMatrix()
glMatrixMode(GL_MODELVIEW)
#resets the rendering
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
viewport = glGetIntegerv(GL_VIEWPORT)
w = viewport[2] - viewport[0]
h = viewport[3] - viewport[1]
h *= (float(w) / float(h)) / (4.0 / 3.0)
glOrtho(0, 1, h/w, 0, -100, 100)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glEnable(GL_COLOR_MATERIAL)
self.theme.setBaseColor(1)
elif self.setlist_type == 3:
w, h = scene.geometry
item = scene.items[scene.selectedIndex]
i = scene.selectedIndex
img = None
lockImg = None
if scene.itemLabels[i] == "Random":
if scene.img_random_label:
img = scene.img_random_label
imgwidth = img.width1()
wfactor = 155.000/imgwidth
elif scene.img_empty_label:
img = scene.img_empty_label
imgwidth = img.width1()
wfactor = 155.000/imgwidth
elif not scene.itemLabels[i]:
        if scene.img_empty_label is not None:
imgwidth = scene.img_empty_label.width1()
wfactor = 155.000/imgwidth
img = scene.img_empty_label
elif scene.itemLabels[i]:
img = scene.itemLabels[i]
imgwidth = img.width1()
wfactor = 155.000/imgwidth
if isinstance(item, Song.SongInfo) and item.getLocked():
if scene.img_locked_label:
imgwidth = scene.img_locked_label.width1()
wfactor2 = 155.000/imgwidth
lockImg = scene.img_locked_label
elif scene.img_empty_label:
imgwidth = scene.img_empty_label.width1()
wfactor = 155.000/imgwidth
img = scene.img_empty_label
if img:
scene.drawImage(img, scale = (wfactor,-wfactor), coord = (.21*w,.59*h))
if lockImg:
scene.drawImage(lockImg, scale = (wfactor2,-wfactor2), coord = (.21*w,.59*h))
def renderForeground(self, scene):
font = scene.fontDict['songListFont']
w, h = scene.geometry
if self.setlist_type == 2:
text = scene.scorePart.text
scale = 0.00250
glColor3f(1, 1, 1)
font.render(text, (0.95, 0.000), scale=scale, align = 2)
elif self.setlist_type == 3:
font = scene.fontDict['songListFont']
c1,c2,c3 = self.song_rb2_diff_color
glColor3f(c1,c2,c3)
font.render(_("DIFFICULTY"), (.095, .5325), scale = 0.0018)
scale = 0.0014
text = _("BAND")
font.render(text, (.17, .5585), scale = scale, align = 2)
text = _("GUITAR")
font.render(text, (.17, .5835), scale = scale, align = 2)
text = _("DRUM")
font.render(text, (.17, .6085), scale = scale, align = 2)
text = _("BASS")
font.render(text, (.17, .6335), scale = scale, align = 2)
text = _("VOCALS")
font.render(text, (.17, .6585), scale = scale, align = 2)
#Add support for lead and rhythm diff
#Qstick - Sorting Text
text = _("SORTING:") + " "
if scene.sortOrder == 0: #title
text = text + _("ALPHABETICALLY BY TITLE")
elif scene.sortOrder == 1: #artist
text = text + _("ALPHABETICALLY BY ARTIST")
elif scene.sortOrder == 2: #timesplayed
text = text + _("BY PLAY COUNT")
elif scene.sortOrder == 3: #album
text = text + _("ALPHABETICALLY BY ALBUM")
elif scene.sortOrder == 4: #genre
text = text + _("ALPHABETICALLY BY GENRE")
elif scene.sortOrder == 5: #year
text = text + _("BY YEAR")
elif scene.sortOrder == 6: #Band Difficulty
text = text + _("BY BAND DIFFICULTY")
      elif scene.sortOrder == 7: #Instrument Difficulty
text = text + _("BY INSTRUMENT DIFFICULTY")
else:
text = text + _("BY SONG COLLECTION")
font.render(text, (.13, .152), scale = 0.0017)
if scene.songLoader:
font.render(_("Loading Preview..."), (.05, .7), scale = 0.001)
return
if scene.img_list_button_guide:
scene.drawImage(scene.img_list_button_guide, scale = (.5, -.5), coord = (w*.5,0), fit = 2)
if scene.songLoader:
font.render(_("Loading Preview..."), (.5, .7), align = 1)
if scene.searching:
font.render(scene.searchText, (.5, .7), align = 1)
if scene.img_list_fg:
scene.drawImage(scene.img_list_fg, scale = (1.0, -1.0), coord = (w/2,h/2), stretched = 3)
def renderSelectedInfo(self, scene):
if self.setlist_type == 0: #note... clean this up. this was a rush job.
if not scene.selectedItem:
return
font = scene.fontDict['font']
screenw, screenh = scene.geometry
v = 0
lfont = font
# here we reset the rendering... without pushing the matrices. (they be thar)
# (otherwise copying engine.view.setOrthogonalProjection)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
viewport = glGetIntegerv(GL_VIEWPORT)
w = viewport[2] - viewport[0]
h = viewport[3] - viewport[1]
h *= (float(w) / float(h)) / (4.0 / 3.0)
glOrtho(0, 1, h/w, 0, -100, 100)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glEnable(GL_COLOR_MATERIAL)
self.theme.setBaseColor(1)
if scene.songLoader:
font.render(_("Loading Preview..."), (.05, .7), scale = 0.001)
#x = .6
x = self.song_cdscore_xpos
y = .15
self.theme.setSelectedColor(1)
c1,c2,c3 = self.song_name_selected_color
glColor3f(c1,c2,c3)
item = scene.selectedItem
angle = scene.itemRenderAngles[scene.selectedIndex]
f = ((90.0 - angle) / 90.0) ** 2
cText = item.name
if (isinstance(item, Song.SongInfo) and item.getLocked()):
cText = _("-- Locked --")
fh = lfont.getHeight()*0.0016
lfont.render(cText, (x, y), scale = 0.0016)
if isinstance(item, Song.SongInfo):
self.theme.setBaseColor(1)
c1,c2,c3 = self.artist_selected_color
glColor3f(c1,c2,c3)
if not item.year == "":
yeartag = ", "+item.year
else:
yeartag = ""
cText = item.artist + yeartag
if (item.getLocked()):
cText = "" # avoid giving away artist of locked song
# evilynux - Use font w/o outline
lfont.render(cText, (x, y+fh), scale = 0.0016)
if item.count:
self.theme.setSelectedColor(1)
c1,c2,c3 = self.song_name_selected_color
glColor3f(c1,c2,c3)
count = int(item.count)
if count == 1:
text = _("Played %d time") % count
else:
text = _("Played %d times") % count
if item.getLocked():
text = item.getUnlockText()
elif scene.careerMode and not item.completed:
text = _("Play To Advance.")
font.render(text, (x, y+2*fh), scale = 0.001)
else:
text = _("Never Played")
if item.getLocked():
text = item.getUnlockText()
elif scene.careerMode and not item.completed:
text = _("Play To Advance.")
lfont.render(text, (x, y+3*fh), scale = 0.001)
self.theme.setSelectedColor(1 - v)
c1,c2,c3 = self.songlistcd_score_color
glColor3f(c1,c2,c3)
scale = 0.0011
#x = .6
x = self.song_cdscore_xpos
y = .42
try:
difficulties = item.partDifficulties[scene.scorePart.id]
except KeyError:
difficulties = []
if len(difficulties) > 3:
y = .42
elif len(difficulties) == 0:
score, stars, name = "---", 0, "---"
for d in difficulties:
scores = item.getHighscores(d, part = scene.scorePart)
if scores:
score, stars, name, scoreExt = scores[0]
try:
notesHit, notesTotal, noteStreak, modVersion, handicap, handicapLong, originalScore = scoreExt
except ValueError:
notesHit, notesTotal, noteStreak, modVersion, oldScores1, oldScores2 = scoreExt
handicap = 0
handicapLong = "None"
originalScore = score
else:
score, stars, name = "---", 0, "---"
self.theme.setBaseColor(1)
font.render(Song.difficulties[d.id].text, (x, y), scale = scale)
starscale = 0.02
stary = 1.0 - y/scene.fontScreenBottom
scene.drawStarScore(screenw, screenh, x+.01, stary-2*fh, stars, starscale, hqStar = True) #volshebnyi
self.theme.setSelectedColor(1)
# evilynux - Also use hit%/noteStreak SongList option
if scores:
if notesTotal != 0:
score = "%s %.1f%%" % (score, (float(notesHit) / notesTotal) * 100.0)
if noteStreak != 0:
score = "%s (%d)" % (score, noteStreak)
font.render(unicode(score), (x + .15, y), scale = scale)
font.render(name, (x + .15, y + fh), scale = scale)
y += 2 * fh
elif isinstance(item, Song.LibraryInfo):
self.theme.setBaseColor(1)
c1,c2,c3 = self.library_selected_color
glColor3f(c1,c2,c3)
if item.songCount == 1:
songCount = _("One Song In This Setlist")
else:
songCount = _("%d Songs In This Setlist") % item.songCount
font.render(songCount, (x, y + 3*fh), scale = 0.0016)
elif isinstance(item, Song.RandomSongInfo):
self.theme.setBaseColor(1 - v)
c1,c2,c3 = self.song_name_selected_color
glColor3f(c1,c2,c3)
font.render(_("(Random Song)"), (x, y + 3*fh), scale = 0.0016)
#MFH CD list
text = scene.scorePart.text
scale = 0.00250
#glColor3f(1, 1, 1)
c1,c2,c3 = self.song_name_selected_color
glColor3f(c1,c2,c3)
w, h = font.getStringSize(text, scale=scale)
font.render(text, (0.95-w, 0.000), scale=scale)
# finally:
# pass
elif self.setlist_type == 1:
return
elif self.setlist_type == 2:
if not scene.selectedItem:
return
item = scene.selectedItem
font = scene.fontDict['font']
w, h = scene.geometry
lfont = font
fh = lfont.getHeight()*0.0016
if isinstance(item, Song.SongInfo):
angle = scene.itemRenderAngles[scene.selectedIndex]
f = ((90.0 - angle) / 90.0) ** 2
self.theme.setSelectedColor(1)
c1,c2,c3 = self.songlistcd_score_color
glColor4f(c1,c2,c3,1)
scale = 0.0013
x = self.song_listcd_score_xpos
y = self.song_listcd_score_ypos + f / 2.0
try:
difficulties = item.partDifficulties[scene.scorePart.id]
except KeyError:
difficulties = []
score, stars, name = "---", 0, "---"
if len(difficulties) > 3:
y = self.song_listcd_score_ypos + f / 2.0
#new
for d in difficulties:
scores = item.getHighscores(d, part = scene.scorePart)
if scores:
score, stars, name, scoreExt = scores[0]
try:
notesHit, notesTotal, noteStreak, modVersion, handicap, handicapLong, originalScore = scoreExt
except ValueError:
notesHit, notesTotal, noteStreak, modVersion, oldScores1, oldScores2 = scoreExt
handicap = 0
handicapLong = "None"
originalScore = score
else:
score, stars, name = "---", 0, "---"
font.render(Song.difficulties[d.id].text, (x, y), scale = scale)
starscale = 0.02
starx = x + starscale/2
stary = 1.0 - (y / scene.fontScreenBottom) - fh - starscale
scene.drawStarScore(w, h, starx, stary, stars, starscale) #MFH
c1,c2,c3 = self.songlistcd_score_color
glColor3f(c1,c2,c3)
if scores:
if notesTotal != 0:
score = "%s %.1f%%" % (score, (float(notesHit) / notesTotal) * 100.0)
if noteStreak != 0:
score = "%s (%d)" % (score, noteStreak)
font.render(unicode(score), (x + .15, y), scale = scale)
font.render(name, (x + .15, y + fh), scale = scale)
y += 2 * fh + f / 4.0
elif self.setlist_type == 3:
w, h = scene.geometry
font = scene.fontDict['songListFont']
item = scene.selectedItem
if isinstance(item, Song.SongInfo):
text = item.artist
if (item.getLocked()):
text = "" # avoid giving away artist of locked song
scale = 0.0015
wt, ht = font.getStringSize(text, scale=scale)
while wt > .21:
tlength = len(text) - 4
text = text[:tlength] + "..."
wt, ht = font.getStringSize(text, scale = scale)
if wt < .22:
break
c1,c2,c3 = self.artist_text_color
glColor3f(c1,c2,c3)
text = string.upper(text)
font.render(text, (.095, .44), scale = scale)
      if scene.img_diff3 is not None:
imgwidth = scene.img_diff3.width1()
imgheight = scene.img_diff3.height1()
wfactor1 = 13.0/imgwidth
albumtag = item.album
albumtag = string.upper(albumtag)
wt, ht = font.getStringSize(albumtag, scale=scale)
while wt > .21:
tlength = len(albumtag) - 4
albumtag = albumtag[:tlength] + "..."
wt, ht = font.getStringSize(albumtag, scale = scale)
if wt < .22:
break
font.render(albumtag, (.095, .47), scale = 0.0015)
genretag = item.genre
font.render(genretag, (.095, .49), scale = 0.0015)
yeartag = item.year
font.render(yeartag, (.095, .51), scale = 0.0015)
for i in range(5):
glColor3f(1, 1, 1)
if i == 0:
diff = item.diffSong
elif i == 1:
diff = item.diffGuitar
elif i == 2:
diff = item.diffDrums
elif i == 3:
diff = item.diffBass
elif i == 4:
diff = item.diffVocals
        if scene.img_diff1 is None or scene.img_diff2 is None or scene.img_diff3 is None:
if diff == -1:
font.render("N/A", (.18, .5585 + i*.025), scale = 0.0014)
elif diff == 6:
glColor3f(1, 1, 0)
font.render(str("*" * (diff -1)), (.18, 0.5685 + i*.025), scale = 0.003)
else:
font.render(str("*" * diff + " " * (5 - diff)), (.18, 0.5685 + i*.025), scale = 0.003)
else:
if diff == -1:
font.render("N/A", (.18, .5585 + i*.025), scale = 0.0014)
elif diff == 6:
for k in range(0,5):
scene.drawImage(scene.img_diff3, scale = (wfactor1,-wfactor1), coord = ((.19+.03*k)*w, (0.2354-.0333*i)*h))
else:
for k in range(0,diff):
scene.drawImage(scene.img_diff2, scale = (wfactor1,-wfactor1), coord = ((.19+.03*k)*w, (0.2354-.0333*i)*h))
for k in range(0, 5-diff):
scene.drawImage(scene.img_diff1, scale = (wfactor1,-wfactor1), coord = ((.31-.03*k)*w, (0.2354-.0333*i)*h))
def renderMoreInfo(self, scene):
if not scene.items:
return
if not scene.selectedItem:
return
item = scene.selectedItem
i = scene.selectedIndex
y = 0
w, h = scene.geometry
font = scene.fontDict['songListFont']
self.theme.fadeScreen(0.25)
if scene.moreInfoTime < 500:
y = 1.0-(float(scene.moreInfoTime)/500.0)
yI = y*h
if scene.img_panel:
scene.drawImage(scene.img_panel, scale = (1.0, -1.0), coord = (w*.5,h*.5+yI), stretched = 3)
if scene.img_tabs:
r0 = (0, (1.0/3.0), 0, .5)
r1 = ((1.0/3.0),(2.0/3.0), 0, .5)
r2 = ((2.0/3.0),1.0,0,.5)
if scene.infoPage == 0:
r0 = (0, (1.0/3.0), .5, 1.0)
scene.drawImage(scene.img_tab1, scale = (.5, -.5), coord = (w*.5,h*.5+yI))
text = item.name
if item.artist != "":
text += " by %s" % item.artist
if item.year != "":
text += " (%s)" % item.year
scale = font.scaleText(text, .45, .0015)
font.render(text, (.52, .25-y), scale = scale, align = 1)
if scene.itemLabels[i]:
imgwidth = scene.itemLabels[i].width1()
wfactor = 95.000/imgwidth
scene.drawImage(scene.itemLabels[i], (wfactor, -wfactor), (w*.375,h*.5+yI))
elif scene.img_empty_label:
imgwidth = scene.img_empty_label.width1()
wfactor = 95.000/imgwidth
scene.drawImage(scene.img_empty_label, (wfactor, -wfactor), (w*.375,h*.5+yI))
text = item.album
if text == "":
text = _("No Album")
scale = font.scaleText(text, .2, .0015)
font.render(text, (.56, .305-y), scale = scale)
text = item.genre
if text == "":
text = _("No Genre")
scale = font.scaleText(text, .2, .0015)
font.render(text, (.56, .35-y), scale = scale)
elif scene.infoPage == 1:
r1 = ((1.0/3.0),(2.0/3.0), .5, 1.0)
scene.drawImage(scene.img_tab2, scale = (.5, -.5), coord = (w*.5,h*.5+yI))
elif scene.infoPage == 2:
r2 = ((2.0/3.0),1.0, .5, 1.0)
scene.drawImage(scene.img_tab3, scale = (.5, -.5), coord = (w*.5,h*.5+yI))
scene.drawImage(scene.img_tabs, scale = (.5*(1.0/3.0), -.25), coord = (w*.36,h*.72+yI), rect = r0)
scene.drawImage(scene.img_tabs, scale = (.5*(1.0/3.0), -.25), coord = (w*.51,h*.72+yI), rect = r1)
scene.drawImage(scene.img_tabs, scale = (.5*(1.0/3.0), -.25), coord = (w*.66,h*.72+yI), rect = r2)
def renderMiniLobby(self, scene):
return
__all__ = ["LEFT", "CENTER", "RIGHT", "_", "Theme", "shaders", "Setlist"]
| west2554/fofix | src/Theme.py | Python | gpl-2.0 | 108,925 |
# coding=utf-8
# author: @netmanchris
# -*- coding: utf-8 -*-
"""
This module contains functions for working with the access controller
capabilities of the HPE IMC WSM Module using the RESTful API
"""
# This section imports required libraries
import json
import requests
from pyhpeimc.auth import HEADERS
def get_ac_info_all(auth, url):
"""
    Function takes no input (beyond auth and url) and issues a RESTful call to HP IMC
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:return: list of dictionaries where each element of the list represents a single wireless
controller which has been discovered in the HPE IMC WSM module
:rtype: list
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.wsm.acinfo import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> ac_info_all = get_ac_info_all(auth.creds, auth.url)
"""
f_url = url + "/imcrs/wlan/acInfo/queryAcBasicInfo"
response = requests.get(f_url, auth=auth, headers=HEADERS)
try:
if response.status_code == 200:
if len(response.text) > 0:
acs = json.loads(response.text)['acBasicInfo']
            if isinstance(acs, dict):
                acs = [acs]
return acs
except requests.exceptions.RequestException as error:
return "Error:\n" + str(error) + " get_ac_info_all: An Error has occured"
| HPNetworking/HP-Intelligent-Management-Center | pyhpeimc/wsm/acinfo.py | Python | apache-2.0 | 1,510 |
"""Configure pytest for Dyson tests."""
from unittest.mock import patch
from libpurecool.dyson_device import DysonDevice
import pytest
from homeassistant.components.dyson import DOMAIN
from homeassistant.core import HomeAssistant
from .common import BASE_PATH, CONFIG
from tests.common import async_setup_component
@pytest.fixture()
async def device(hass: HomeAssistant, request) -> DysonDevice:
"""Fixture to provide Dyson 360 Eye device."""
platform = request.module.PLATFORM_DOMAIN
get_device = request.module.async_get_device
if hasattr(request, "param"):
if isinstance(request.param, list):
device = get_device(*request.param)
else:
device = get_device(request.param)
else:
device = get_device()
with patch(f"{BASE_PATH}.DysonAccount.login", return_value=True), patch(
f"{BASE_PATH}.DysonAccount.devices", return_value=[device]
), patch(f"{BASE_PATH}.PLATFORMS", [platform]):
# PLATFORMS is patched so that only the platform being tested is set up
await async_setup_component(
hass,
DOMAIN,
CONFIG,
)
await hass.async_block_till_done()
return device
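# Usage sketch: a platform test module consuming this fixture is expected to
# define PLATFORM_DOMAIN and an async_get_device() factory (see the
# request.module lookups above); the names below are illustrative only.
#
#     PLATFORM_DOMAIN = "vacuum"
#
#     def async_get_device():
#         return make_mock_360_eye()  # hypothetical helper
#
#     async def test_state(hass, device):
#         ...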
| aronsky/home-assistant | tests/components/dyson/conftest.py | Python | apache-2.0 | 1,218 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import logging
import os
from absl import app
from absl import flags
from contextlib import suppress
from datetime import datetime
from main import post_processor
logging.basicConfig(
filename=f'postprocessor-{datetime.now().strftime("%Y-%m-%d-%H:%M:%S")}.log',
format='%(asctime)s %(message)s',
datefmt='%Y-%m-%d %I:%M:%S %p',
level=logging.DEBUG
)
FLAGS = flags.FLAGS
flags.DEFINE_string('name',
None,
'filename')
flags.DEFINE_string('project', None,
'GCP Project to act on. Default is the environment value.')
flags.DEFINE_string('dataset', 'report2bq',
'BQ Dataset to act on. Default is "report2bq".')
flags.DEFINE_string('table', None,
'BQ Table to act on.')
flags.DEFINE_string('report_id', None, 'The report id that caused this.')
flags.DEFINE_string('product', None, 'The report type.')
flags.DEFINE_integer('rows', 0, 'Number of rows imported the table.')
flags.DEFINE_string('columns', None,
'A ";" delimited list of columns in the table.')
def main(unused_argv):
  # os.environ is a mapping, not a callable, so read the variable with .get()
  project = FLAGS.project or os.environ.get('GCP_PROJECT')
event = {
'data': base64.b64encode(FLAGS.name.encode('utf-8')),
'attributes': {
'project': project,
'dataset': FLAGS.dataset,
'table': FLAGS.table,
'rows': str(FLAGS.rows),
'id': FLAGS.report_id,
'type': FLAGS.product,
'columns': FLAGS.columns
}
}
post_processor(event, None)
if __name__ == '__main__':
with suppress(SystemExit):
app.run(main)
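# Example invocation (sketch; all flag values are illustrative):
#
#     python postprocessor.py --name=my_processor --project=my-gcp-project \
#         --dataset=report2bq --table=report_12345 --report_id=12345 \
#         --product=dv360 --rows=100 --columns="campaign;impressions;clicks"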
| google/report2bq | application/cli/postprocessor.py | Python | apache-2.0 | 2,165 |
# (C) British Crown Copyright 2011 - 2018, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <https://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
import matplotlib.pyplot as plt
import matplotlib.ticker as mticker
try:
from unittest import mock
except ImportError:
import mock
import pytest
import cartopy.crs as ccrs
from cartopy.mpl.geoaxes import GeoAxes
from cartopy.mpl.gridliner import LATITUDE_FORMATTER, LONGITUDE_FORMATTER
from cartopy.tests.mpl import MPL_VERSION, ImageTesting
@pytest.mark.natural_earth
@ImageTesting(['gridliner1'])
def test_gridliner():
ny, nx = 2, 4
plt.figure(figsize=(10, 10))
ax = plt.subplot(nx, ny, 1, projection=ccrs.PlateCarree())
ax.set_global()
ax.coastlines()
ax.gridlines()
ax = plt.subplot(nx, ny, 2, projection=ccrs.OSGB())
ax.set_global()
ax.coastlines()
ax.gridlines()
ax = plt.subplot(nx, ny, 3, projection=ccrs.OSGB())
ax.set_global()
ax.coastlines()
ax.gridlines(ccrs.PlateCarree(), color='blue', linestyle='-')
ax.gridlines(ccrs.OSGB())
ax = plt.subplot(nx, ny, 4, projection=ccrs.PlateCarree())
ax.set_global()
ax.coastlines()
ax.gridlines(ccrs.NorthPolarStereo(), alpha=0.5,
linewidth=1.5, linestyle='-')
ax = plt.subplot(nx, ny, 5, projection=ccrs.PlateCarree())
ax.set_global()
ax.coastlines()
osgb = ccrs.OSGB()
ax.set_extent(tuple(osgb.x_limits) + tuple(osgb.y_limits), crs=osgb)
ax.gridlines(osgb)
ax = plt.subplot(nx, ny, 6, projection=ccrs.NorthPolarStereo())
ax.set_global()
ax.coastlines()
ax.gridlines(alpha=0.5, linewidth=1.5, linestyle='-')
ax = plt.subplot(nx, ny, 7, projection=ccrs.NorthPolarStereo())
ax.set_global()
ax.coastlines()
osgb = ccrs.OSGB()
ax.set_extent(tuple(osgb.x_limits) + tuple(osgb.y_limits), crs=osgb)
ax.gridlines(osgb)
ax = plt.subplot(nx, ny, 8,
projection=ccrs.Robinson(central_longitude=135))
ax.set_global()
ax.coastlines()
ax.gridlines(ccrs.PlateCarree(), alpha=0.5, linewidth=1.5, linestyle='-')
delta = 1.5e-2
plt.subplots_adjust(left=0 + delta, right=1 - delta,
top=1 - delta, bottom=0 + delta)
def test_gridliner_specified_lines():
xs = [0, 60, 120, 180, 240, 360]
ys = [-90, -60, -30, 0, 30, 60, 90]
ax = mock.Mock(_gridliners=[], spec=GeoAxes)
gl = GeoAxes.gridlines(ax, xlocs=xs, ylocs=ys)
assert isinstance(gl.xlocator, mticker.FixedLocator)
assert isinstance(gl.ylocator, mticker.FixedLocator)
assert gl.xlocator.tick_values(None, None).tolist() == xs
assert gl.ylocator.tick_values(None, None).tolist() == ys
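# (FixedLocator echoes back exactly the tick positions it was built with,
# which is why tick_values() above can be compared to the original lists.)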
# The tolerance on this test is particularly high because of the high number
# of text objects. A new testing strategy is needed for this kind of test.
if MPL_VERSION >= '2.0':
grid_label_image = 'gridliner_labels'
else:
grid_label_image = 'gridliner_labels_1.5'
@pytest.mark.natural_earth
@ImageTesting([grid_label_image])
def test_grid_labels():
fig = plt.figure(figsize=(8, 10))
crs_pc = ccrs.PlateCarree()
crs_merc = ccrs.Mercator()
crs_osgb = ccrs.OSGB()
ax = fig.add_subplot(3, 2, 1, projection=crs_pc)
ax.coastlines()
ax.gridlines(draw_labels=True)
# Check that adding labels to Mercator gridlines gives an error.
# (Currently can only label PlateCarree gridlines.)
ax = fig.add_subplot(3, 2, 2,
projection=ccrs.PlateCarree(central_longitude=180))
ax.coastlines()
with pytest.raises(TypeError):
ax.gridlines(crs=crs_merc, draw_labels=True)
ax.set_title('Known bug')
gl = ax.gridlines(crs=crs_pc, draw_labels=True)
gl.xlabels_top = False
gl.ylabels_left = False
gl.xlines = False
ax = fig.add_subplot(3, 2, 3, projection=crs_merc)
ax.coastlines()
ax.gridlines(draw_labels=True)
# Check that labelling the gridlines on an OSGB plot gives an error.
# (Currently can only draw these on PlateCarree or Mercator plots.)
ax = fig.add_subplot(3, 2, 4, projection=crs_osgb)
ax.coastlines()
with pytest.raises(TypeError):
ax.gridlines(draw_labels=True)
ax.remove()
ax = fig.add_subplot(3, 2, 4, projection=crs_pc)
ax.coastlines()
gl = ax.gridlines(
crs=crs_pc, linewidth=2, color='gray', alpha=0.5, linestyle='--')
gl.xlabels_bottom = True
gl.ylabels_right = True
gl.xlines = False
gl.xlocator = mticker.FixedLocator([-180, -45, 45, 180])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 15, 'color': 'gray'}
gl.xlabel_style = {'color': 'red'}
gl.xpadding = 10
gl.ypadding = 15
# trigger a draw at this point and check the appropriate artists are
# populated on the gridliner instance
fig.canvas.draw()
assert len(gl.xlabel_artists) == 4
assert len(gl.ylabel_artists) == 5
assert len(gl.xline_artists) == 0
ax = fig.add_subplot(3, 2, 5, projection=crs_pc)
ax.set_extent([-20, 10.0, 45.0, 70.0])
ax.coastlines()
ax.gridlines(draw_labels=True)
ax = fig.add_subplot(3, 2, 6, projection=crs_merc)
ax.set_extent([-20, 10.0, 45.0, 70.0], crs=crs_pc)
ax.coastlines()
ax.gridlines(draw_labels=True)
# Increase margins between plots to stop them bumping into one another.
fig.subplots_adjust(wspace=0.25, hspace=0.25)
| pelson/cartopy | lib/cartopy/tests/mpl/test_gridliner.py | Python | lgpl-3.0 | 6,136 |
import json
from pimlico.datatypes.corpora.json import JsonDocumentType
from pimlico.cli.browser.tools.formatter import DocumentBrowserFormatter
class JsonFormatter(DocumentBrowserFormatter):
DATATYPE = JsonDocumentType()
def format_document(self, doc):
return json.dumps(doc.data, indent=4)
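# Example (sketch): for a document whose .data is {"text": "hi"}, the
# formatter returns the pretty-printed string:
#
#     {
#         "text": "hi"
#     }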
| markgw/pimlico | src/python/pimlico/datatypes/corpora/formatters/json.py | Python | gpl-3.0 | 313 |
#---------------------------------------------------------------------------------
# Vega Strike script for a quest
# Copyright (C) 2008 Vega Strike team
# Contact: hellcatv@sourceforge.net
# Internet: http://vegastrike.sourceforge.net/
#.
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# Description: A small dispute about docking priority between a merchant and an
# ISO ship. Based on a story and original script by PeteyG
# Author: pyramid
# Version: 2008-04-20
#
#---------------------------------------------------------------------------------
# import used libraries
import quest
import Director
import VS
import Vector
import universe
import unit
import launch
import news
import gettext
class quest_dispute (quest.quest):
def __init__ (self):
self.player = VS.getPlayer()
self.stage = 1
self.timer = VS.GetGameTime()
self.talktime = VS.GetGameTime()
self.anitime = VS.GetGameTime()
self.merColor = "#99FFFF"
self.socColor = "#FF9999"
# This will get an agricultural station from the system, and assign it to 'station'
# this station is the only one that has only one docking port, thus fits perfectly
# into this quest theme
self.station = unit.getUnitByName('Agricultural_Station')
# the flow of the quest
# (0) check if player is in system
# (1) launch the actor units
# (2) start docking dispute conversation
# (3) play dispute comm animation
# (4) start armed conflict
# (5) socialist call for help
# (6) play comm animation
# (7) wait for player interaction - check who's still alive
# (11) socialist reward
# (21) merchant reward
# (12/22) play the comm animation
# (30) let the winner dock at the station
# (99) finish quest
# there could be more variety:
# (a) confed militia flying in to get the trouble makers
# (b) destoyed ships spawning escape pods
# (c) some provoking conversation during the conflict
# (d) breaking off the battle after some time (and off to dock)
# (e) station calling for the guys to stop hitting her
def Execute (self):
if (self.player and self.station):
# launches the particpating actors
if (self.stage==1 and VS.GetGameTime()>self.timer and self.station.getDistance(self.player)<50000):
self.stageLaunchActors()
self.stage = 2
        # checks to see if the player is within 15km of the station to initiate the dispute
if (self.stage==2 and VS.GetGameTime()>self.timer and self.station.getDistance(self.player)<15000):
self.stageDockingDispute()
self.talktime = VS.GetGameTime()
self.stage = 3
# play the talk animation
if (self.stage==3 and VS.GetGameTime()<self.timer and VS.GetGameTime()>=self.anitime):
# check which animation to play
for index in range (len(self.sequence)):
if (VS.GetGameTime()-self.talktime>=self.sequence[index][0] and VS.GetGameTime()-self.talktime<=self.sequence[index][0]+self.sequence[index][1]):
# play the animation
self.player.commAnimation(self.animations[self.sequence[index][2]][0])
# this is how long one animation takes
self.anitime = VS.GetGameTime()+2
if (self.stage==3 and VS.GetGameTime()>=self.timer):
self.stage = 4
# get the merchant to attack
if (self.stage==4 and VS.GetGameTime()>self.timer):
# the two ships start to attack each other
self.merchant.SetTarget(self.socialist)
VS.AdjustRelation(self.merchant.getFactionName(),self.socialist.getFactionName(),-5,1)
self.merchant.LoadAIScript("default")
# attack directive - no ai change, no target change
self.merchant.setFgDirective("A.")
self.timer = VS.GetGameTime()+5
self.stage = 5
# get the socialist to attack
if (self.stage==5 and VS.GetGameTime()>self.timer):
# the privateer gets involved... or does he?
VS.IOmessage (0,"[Lenin's Mercy]","privateer",self.socColor+_("Mayday! We are under attack! Privateer, please help us... we are no match for them. We have wounded on board!"))
VS.IOmessage (6,"[VulCorp Transport A-5]","privateer",self.merColor+_("Privateer, if you look the other way... you will be duly compensated."))
self.animations = [["com_dispute_socialist.ani",2],["com_dispute_merchant.ani",2]]
self.sequence = [[0,6,0],[6,4,1]]
self.talktime = VS.GetGameTime()
VS.AdjustRelation(self.socialist.getFactionName(),self.merchant.getFactionName(),-5,1)
self.socialist.SetTarget(self.merchant)
self.socialist.LoadAIScript("default")
self.socialist.setFgDirective("A.")
self.timer = VS.GetGameTime()+10
self.stage = 6
# play the talk animation
if (self.stage==6 and VS.GetGameTime()<self.timer and VS.GetGameTime()>=self.anitime):
for index in range (len(self.sequence)):
if (VS.GetGameTime()-self.talktime>=self.sequence[index][0] and VS.GetGameTime()-self.talktime<=self.sequence[index][0]+self.sequence[index][1]):
self.player.commAnimation(self.animations[self.sequence[index][2]][0])
self.anitime = VS.GetGameTime()+2
if (self.stage==6 and VS.GetGameTime()>=self.timer):
self.stage = 7
# we need to refresh the ai during battle since it lasts for only 7 seconds
if (self.stage==7 and VS.GetGameTime()>self.timer):
self.merchant.LoadAIScript("default")
self.socialist.LoadAIScript("default")
self.timer = VS.GetGameTime()+2
# evaluate the conflict result
if (self.stage==7 and self.merchant.isNull()):
VS.IOmessage (0,"[VulCorp Transport A-5]","all",self.merColor+_("Oh nooooo...!!!!!"))
self.player.commAnimation("com_dispute_merchant.ani")
self.stage = 11
if (self.stage==7 and self.socialist.isNull()):
VS.IOmessage (0,"[Lenin's Mercy]","all",self.socColor+_("Liberte! Egalite!! Fraternite...!!!!!"))
self.player.commAnimation("com_dispute_socialist.ani")
self.stage = 21
# if the merchant has died, give player the socialist reward
if (self.stage==11 and VS.GetGameTime()>self.timer):
self.socialist.PrimeOrders()
VS.IOmessage (0,"[Lenin's Mercy]","privateer",self.socColor+_("Thank you, Privateer! The Interstellar Socialist Organization is in your debt. We are getting our wounded to the base's medical facility."))
VS.IOmessage (5,"[Lenin's Mercy]","privateer",self.socColor+_("We have no money... but we are transmitting you the coordinates of the cargo we dumped to make room for the attack victims. Do with it what you will."))
VS.IOmessage (10,"[Lenin's Mercy]","privateer",self.socColor+_("You have made a friend with the ISO today. Have a safe journey."))
self.animations = [["com_dispute_socialist.ani",2]]
self.sequence = [[0,15,0]]
self.talktime = VS.GetGameTime()
# launches various types of cargo as reward
# launch_wave_around_unit (fgname, faction, type, ai, nr_ships, minradius, maxradius, my_unit,logo='',useani=1,skipdj=0)
self.cargo = launch.launch_wave_around_unit("Cargo",'neutral','iron_ore.cargo',"default",2,3000,6000,self.player)
self.cargo = launch.launch_wave_around_unit("Cargo",'neutral','tungsten_ore.cargo',"default",2,3000,6000,self.player)
self.cargo = launch.launch_wave_around_unit("Cargo",'neutral','generic_cargo',"default",16,3000,6000,self.player)
# reputation with ISO goes up. Not sure of the numbers
VS.AdjustRelation(self.player.getFactionName(),self.socialist.getFactionName(),1,5)
# publish news
text = _("PRIVATEER SAVES SHIPLOAD OF WOUNDED\\\Today, an unprecedented dispute about landing priorities took place close to a station in the Regallis system of Sol sector. ")
text += _("A merchant was delivering a priority shipment to a station in the system while an ISO transport vessel requested emergency landing having twelve rescued passengers on board who were previously wounded in a pirate attack. ")
text += _("A privateer approaching that base at the same time, and assisting the dispute, reacted to the situation before security forces could arrive at the scene and promptly removed the capitalist bloodsucker, thus saving many lives. ")
text += _("Presently, the injured are being taken care of at the medical facilities of the station with two heavily wounded remaining under intensive care.")
news.publishNews(text)
# set next stage conditions
self.timer = VS.GetGameTime()+15
self.winner = self.socialist
self.stage = 12
# play the talk animation
if (self.stage==12 and VS.GetGameTime()<self.timer and VS.GetGameTime()>=self.anitime):
for index in range (len(self.sequence)):
if (VS.GetGameTime()-self.talktime>=self.sequence[index][0] and VS.GetGameTime()-self.talktime<=self.sequence[index][0]+self.sequence[index][1]):
self.player.commAnimation(self.animations[self.sequence[index][2]][0])
self.anitime = VS.GetGameTime()+2
if (self.stage==12 and VS.GetGameTime()>=self.timer):
self.stage = 30
# if the merchant ship is still alive
if (self.stage==21 and VS.GetGameTime()>self.timer):
self.merchant.PrimeOrders()
            # if the merchant is still friends with the player, the merchant gives
            # him a nice chunk of cash (note: the literal condition below is always
            # true; a faction-relation check was presumably intended here)
            if (5 > -.1):
VS.IOmessage (0,"[VulCorp Transport A-5]","privateer",self.merColor+_("Privateer, thank you for your cooperation."))
VS.IOmessage (3,"[VulCorp Transport A-5]","privateer",self.merColor+_("We will be able to make a killing on this shipment thanks to you. Here are 15000 credits for your trouble."))
self.animations = [["com_dispute_merchant.ani",2]]
self.sequence = [[0,8,0]]
self.talktime = VS.GetGameTime()
self.player.addCredits(15000)
# rep with merchants goes up
VS.AdjustRelation(self.player.getFactionName(),self.merchant.getFactionName(),.1,.5)
# publish news
text = _("MALICIOUS MERCHANT MASSACRES MARXIST MERCY MISSION\\\Today, an unprecedented dispute about landing priorities took place close to a station in the Regallis system of Sol sector. ")
text += _("A merchant was delivering a priority shipment to a station in the system while an ISO transport vessel requested emergency landing having twelve rescued passengers on board who were previously wounded in a pirate attack. ")
text += _("Before security forces could arrive at the scene the merchant pilot promptly applied his own justice scheme thus reducing the other vessel cum content to space dust.")
news.publishNews(text)
# set next stage conditions
self.timer = VS.GetGameTime()+8
self.winner = self.merchant
self.stage = 22
# play the talk animation
if (self.stage==22 and VS.GetGameTime()<self.timer and VS.GetGameTime()>=self.anitime):
for index in range (len(self.sequence)):
if (VS.GetGameTime()-self.talktime>=self.sequence[index][0] and VS.GetGameTime()-self.talktime<=self.sequence[index][0]+self.sequence[index][1]):
self.player.commAnimation(self.animations[self.sequence[index][2]][0])
self.anitime = VS.GetGameTime()+2
if (self.stage==22 and VS.GetGameTime()>=self.timer):
self.stage = 30
# let the remaining ship approach the station and dock
if (self.stage==30 and VS.GetGameTime()>self.timer):
self.timer = VS.GetGameTime()+5
#if (not self.station.isDocked(self.winner)):
if (not self.winner.isNull()):
# performDockingOperations is unusable
# 1st it lets the ship fly through the stations
# 2nd it doesn't dock the unit
#self.winner.performDockingOperations(self.station,1)
unit.approachAndDock(self.winner,self.station)
else:
self.stage = 99
if (self.stage==99 and VS.GetGameTime()>self.timer):
self.playernum = -1
self.name = "quest_dispute"
self.removeQuest()
self.stage += 1 # don't enter this loop anymore
return 0
return 1
def stageLaunchActors(self):
radius = self.station.rSize()
# launch the two freighters
self.merchant = launch.launch_wave_around_unit("VulCorp Transport A-5","merchant_guild","Plowshare","ai_sitting_duck.py",1,radius,radius*2,self.station)
self.socialist = launch.launch_wave_around_unit("Lenin's Mercy","ISO","Llama","ai_sitting_duck.py",1,radius,radius*2,self.station)
# make the ship a real sitting duck, won't accept any other orders
self.merchant.PrimeOrders()
self.socialist.PrimeOrders()
        # make the actors oriented towards the station
        # (sic: "faceTaget" is the spelling of the helper in the unit module)
        unit.faceTaget(self.merchant, self.station)
        unit.faceTaget(self.socialist, self.station)
# make the actors fly close to the docking port
self.merchant.performDockingOperations(self.station,False)
self.socialist.performDockingOperations(self.station,False)
def stageDockingDispute(self):
# the comm interchange between the two ships
VS.IOmessage (0,"[VulCorp Transport A-5]","all",self.merColor+_("VulCorp Transport alpha five requesting priority docking."))
VS.IOmessage (5,"[VulCorp Transport A-5]","all",self.merColor+_("We have a load of spare parts that needs to be delivered within the next half hour, or else we don't get paid."))
VS.IOmessage (15,"[Lenin's Mercy]","all",self.socColor+_("Negative, transport Lenin's Mercy requesting emergency docking. We have thirteen critically injured passengers."))
VS.IOmessage (25,"[Lenin's Mercy]","all",self.socColor+_("We picked them up after a squadron of pirates attacked their ship. They need immediate medical attention!"))
VS.IOmessage (35,"[VulCorp Transport A-5]","all",self.merColor+_("Station control, might we remind you that we have a contract with your base? We demand priority in the docking queue so we can complete our transaction."))
VS.IOmessage (45,"[Lenin's Mercy]","all",self.socColor+_("You capitalist pigs! We have dying men and women on board, and all you can think about is your filthy money!"))
VS.IOmessage (55,"[VulCorp Transport A-5]","all",self.merColor+_("Socialist vessel: Stay out of the docking queue or you will be fired upon. We will not let a bunch of bleeding communists turn this major deal sour!"))
VS.IOmessage (65,"[Lenin's Mercy]","all",self.socColor+_("Negative, VulCorp Transport. The lives of our passengers are worth more than your profits!"))
VS.IOmessage (75,"[VulCorp Transport A-5]","all",self.merColor+_("All batteries! Open fire!!"))
# initialize the animation parameters
# the animations to be alternated - animation file and animation duration
self.animations = [["com_dispute_merchant.ani",2],["com_dispute_socialist.ani",2]]
# states the beginning, duration, and animation number to be played
self.sequence = [[0,10,0],[15,15,1],[35,5,0],[45,5,1],[55,5,0],[65,5,1],[75,4,0]]
# set the beginning of the talk sequence
self.talktime = VS.GetGameTime()
self.timer = VS.GetGameTime()+80
class quest_dispute_factory (quest.quest_factory):
# call this class from the adventure file
def __init__ (self):
quest.quest_factory.__init__ (self,"quest_dispute")
def create (self):
return quest_dispute()
class Executor(Director.Mission):
# call this class from the mission file
def __init__(self, classesToExecute):
Director.Mission.__init__(self)
self.classes = classesToExecute
def Execute(self):
for c in self.classes:
c.Execute()
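# Usage sketch (per the comments above): an adventure script instantiates
# quest_dispute_factory(), while a mission file constructs
# Executor([quest_dispute()]) so the Director ticks Execute() each frame.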
| costalfy/Vega-Strike | data/modules/quests/quest_dispute.py | Python | gpl-2.0 | 17,423 |
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def sortedArrayToBST(self, nums):
"""
:type nums: List[int]
:rtype: TreeNode
"""
if nums == []:
return None
        # floor division keeps the midpoint an int on both Python 2 and 3
        m = (len(nums) - 1) // 2
root = TreeNode(nums[m])
root.left = self.sortedArrayToBST(nums[:m])
root.right = self.sortedArrayToBST(nums[m + 1:])
return root
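# Example (sketch): sortedArrayToBST([-10, -3, 0, 5, 9]) picks nums[2] == 0 as
# the root, and the recursion yields the height-balanced tree
#
#         0
#        / \
#     -10   5
#        \   \
#        -3   9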
| Chasego/cod | leetcode/108-Convert-Sorted-Array-to-Binary-Search-Tree/ConvSrtArr2BST_001_rec.py | Python | mit | 553 |
# Mainly so the app displays with a Chinese name in the admin interface
default_app_config = 'bespeak_meal.apps.Bespeak_meal_config'
| zhengxinxing/bespeak_meal | __init__.py | Python | mit | 120 |
import html.parser
import re
import glob
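# Output, File, FileError, Response, Sandbox, Plugins, conf, log, sandbox and
# dispatch are not imported here; they are presumably injected by the FrozenFW
# plugin loader's execution environment when this module is exec'd.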
class _Parser(html.parser.HTMLParser):
def __init__(self):
self.subs = []
super().__init__()
def handle_pi(self,data):
data = data.strip()
if data.startswith('?'):
data = data[1:]
if data.startswith("python"):
data = data[6:]
if data.endswith('?'):
data = data[:-1]
self.subs.append(to_html(data))
class Template(Output):
def __init__(self,path :str="template.html"):
try:
self.rep = {}
self.arg = []
self.__s = ''
self.func_dict = {}
self.set_template(path)
self.parser = _Parser()
except FileError as e:
raise FileError("Not Valid Template {}".format(path)) from e
def set_template(self,path :str):
""" Set template file. This file must be in File limits (use whitelist if you need) """
self.data = File.get_contents(path)
def templ_exec(self,s :str) -> str:
self.parser.feed(s)
to_exec = self.parser.subs
self.parser.subs = []
for i in to_exec:
t = self.exec(i.strip())
s = re.sub(r"<\?python\s*{}\s*\?>".format(re.escape(i.strip())),t,to_html(s),count=1)
return s
def exec(self,s :str) -> str:
exec(s,{},self.func_dict)
t = self.__s
self.__s = ''
return t
def print(self,*args,**kwargs):
        sep = kwargs.get('sep', '')
self.__s += sep.join(args)
def write(self,*args,**kwargs):
self.arg.extend(list(args))
self.rep.update(kwargs)
def __add__(self,f):
self.func_dict[f.__name__] = f
return self
def get_body(self) -> str:
return self.templ_exec(self.data).format(*tuple(self.arg),**self.rep)
def ret(stat :str, head , body :str, filename :str):
if not filename.endswith(".py") or (stat and stat[:3]!="200"):
return Response(stat,head,body,filename)
try:
exec(File.get_contents(filename).replace("__builtins__",'') if conf.query("secure_lock") else File.get_contents(filename),globals())
except BaseException as e:
if conf.query("debug_user_script"):
import traceback
return Response("500 Internal Server Error",head,traceback.format_exc(),filename)
else:
log.notice(e)
return Response("200 OK",head,output.get_body(),filename)
output = Template(conf.query("template_file"))
t_plug = Plugins()
for i in glob.iglob("/".join((conf.query("plugin_dir"),"template/*"))):
if i == "/".join((conf.query("plugin_dir"),"template")):
continue
t_plug.load_plugin(i)
output = (t_plug.exec(Sandbox(["print","repl","Template","output"],conf.query("sand_limits"),log),{ "print" : output.print, "repl" : output.rep, "Template" : Template, "output" : output }))['output']
del t_plug
sandbox = Sandbox(sandbox.allowed_vars.append("Template"),sandbox.new_limits,log)
dispatch += ret
def print(*args,**kwargs):
output.write(*args,**kwargs)
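# Template snippet sketch: pages rendered through Template may embed Python
# processing instructions, which _Parser extracts and templ_exec substitutes
# back into the page in place of each <?python ... ?> tag:
#
#     <html><body><?python some_registered_func() ?></body></html>
#
# Callables become reachable from embedded code by registering them with
# `output + some_registered_func` (see Template.__add__), which places them
# in func_dict, the locals mapping used by Template.exec.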
| Malex/FrozenFW | frozen/plugins/Template.py | Python | gpl-3.0 | 2,730 |
# -*- coding: utf-8 -*-
#
# workspace-tools documentation build configuration file, created by
# sphinx-quickstart on Fri Dec 26 01:16:43 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'workspace-tools'
copyright = u'2014, Max Zheng'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.2.20'
# The full version, including alpha/beta/rc tags.
release = '0.2.20'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'workspace-toolsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'workspace-tools.tex', u'workspace-tools Documentation',
u'Max Zheng', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'workspace-tools', u'workspace-tools Documentation',
[u'Max Zheng'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'workspace-tools', u'workspace-tools Documentation',
u'Max Zheng', 'workspace-tools', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
autoclass_content = "both"
| maxzheng/workspace-tools | docs/conf.py | Python | mit | 8,268 |
#!/usr/bin/python
import os
import sys
import warnings
# the site module must be imported for normal behavior to take place; it is
# done dynamically so that cx_Freeze will not add all modules referenced by
# the site module to the frozen executable
__import__("site")
# now locate the pth file to modify the path appropriately
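# FILE_NAME is not defined in this file; the cx_Freeze bootstrap presumably
# injects it (the frozen executable's path) into the script's namespace.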
baseName, ext = os.path.splitext(FILE_NAME)
pathFileName = baseName + ".pth"
sys.path = [s.strip() for s in file(pathFileName).read().splitlines()] + \
sys.path
| jnoortheen/nigandu | src/initscript.py | Python | gpl-2.0 | 500 |
import os
import re
import urllib
from django.conf import settings
from django.contrib.auth import SESSION_KEY, REDIRECT_FIELD_NAME
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.sites.models import Site, RequestSite
from django.contrib.auth.models import User
from django.test import TestCase
from django.core import mail
from django.core.urlresolvers import reverse
from django.http import QueryDict
class AuthViewsTestCase(TestCase):
"""
    Helper base class for all the following test cases.
"""
fixtures = ['authtestdata.json']
urls = 'django.contrib.auth.tests.urls'
def setUp(self):
self.old_LANGUAGES = settings.LANGUAGES
self.old_LANGUAGE_CODE = settings.LANGUAGE_CODE
settings.LANGUAGES = (('en', 'English'),)
settings.LANGUAGE_CODE = 'en'
self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
settings.TEMPLATE_DIRS = (
os.path.join(os.path.dirname(__file__), 'templates'),
)
def tearDown(self):
settings.LANGUAGES = self.old_LANGUAGES
settings.LANGUAGE_CODE = self.old_LANGUAGE_CODE
settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS
def login(self, password='password'):
response = self.client.post('/login/', {
'username': 'testclient',
'password': password
}
)
self.assertEquals(response.status_code, 302)
self.assert_(response['Location'].endswith(settings.LOGIN_REDIRECT_URL))
self.assert_(SESSION_KEY in self.client.session)
class PasswordResetTest(AuthViewsTestCase):
def test_email_not_found(self):
"Error is raised if the provided email address isn't currently registered"
response = self.client.get('/password_reset/')
self.assertEquals(response.status_code, 200)
response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
self.assertContains(response, "That e-mail address doesn't have an associated user account")
self.assertEquals(len(mail.outbox), 0)
def test_email_found(self):
"Email is sent if a valid email address is provided for password reset"
response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEquals(response.status_code, 302)
self.assertEquals(len(mail.outbox), 1)
self.assert_("http://" in mail.outbox[0].body)
self.assertEquals(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
def test_email_found_custom_from(self):
"Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
response = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
self.assertEquals(response.status_code, 302)
self.assertEquals(len(mail.outbox), 1)
self.assertEquals("staffmember@example.com", mail.outbox[0].from_email)
def _test_confirm_start(self):
# Start by creating the email
response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEquals(response.status_code, 302)
self.assertEquals(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assert_(urlmatch is not None, "No URL found in sent email")
return urlmatch.group(), urlmatch.groups()[0]
def test_confirm_valid(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertEquals(response.status_code, 200)
self.assert_("Please enter your new password" in response.content)
def test_confirm_invalid(self):
url, path = self._test_confirm_start()
# Let's munge the token in the path, but keep the same length,
# in case the URLconf will reject a different length.
path = path[:-5] + ("0"*4) + path[-1]
response = self.client.get(path)
self.assertEquals(response.status_code, 200)
self.assert_("The password reset link was invalid" in response.content)
def test_confirm_invalid_user(self):
# Ensure that we get a 200 response for a non-existant user, not a 404
response = self.client.get('/reset/123456/1-1/')
self.assertEquals(response.status_code, 200)
self.assert_("The password reset link was invalid" in response.content)
def test_confirm_overflow_user(self):
# Ensure that we get a 200 response for a base36 user id that overflows int
response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
self.assertEquals(response.status_code, 200)
self.assert_("The password reset link was invalid" in response.content)
def test_confirm_invalid_post(self):
# Same as test_confirm_invalid, but trying
# to do a POST instead.
url, path = self._test_confirm_start()
path = path[:-5] + ("0"*4) + path[-1]
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2':' anewpassword'})
# Check the password has not been changed
u = User.objects.get(email='staffmember@example.com')
self.assert_(not u.check_password("anewpassword"))
def test_confirm_complete(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
# It redirects us to a 'complete' page:
self.assertEquals(response.status_code, 302)
# Check the password has been changed
u = User.objects.get(email='staffmember@example.com')
self.assert_(u.check_password("anewpassword"))
# Check we can't use the link again
response = self.client.get(path)
self.assertEquals(response.status_code, 200)
self.assert_("The password reset link was invalid" in response.content)
def test_confirm_different_passwords(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2':' x'})
self.assertEquals(response.status_code, 200)
self.assert_("The two password fields didn't match" in response.content)
class ChangePasswordTest(AuthViewsTestCase):
def fail_login(self, password='password'):
response = self.client.post('/login/', {
'username': 'testclient',
'password': password
}
)
self.assertEquals(response.status_code, 200)
self.assert_("Please enter a correct username and password. Note that both fields are case-sensitive." in response.content)
def logout(self):
response = self.client.get('/logout/')
def test_password_change_fails_with_invalid_old_password(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'donuts',
'new_password1': 'password1',
'new_password2': 'password1',
}
)
self.assertEquals(response.status_code, 200)
self.assert_("Your old password was entered incorrectly. Please enter it again." in response.content)
def test_password_change_fails_with_mismatched_passwords(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'donuts',
}
)
self.assertEquals(response.status_code, 200)
self.assert_("The two password fields didn't match." in response.content)
def test_password_change_succeeds(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
}
)
self.assertEquals(response.status_code, 302)
self.assert_(response['Location'].endswith('/password_change/done/'))
self.fail_login()
self.login(password='password1')
class LoginTest(AuthViewsTestCase):
def test_current_site_in_context_after_login(self):
response = self.client.get(reverse('django.contrib.auth.views.login'))
self.assertEquals(response.status_code, 200)
if Site._meta.installed:
site = Site.objects.get_current()
self.assertEquals(response.context['site'], site)
self.assertEquals(response.context['site_name'], site.name)
else:
self.assertIsInstance(response.context['site'], RequestSite)
self.assert_(isinstance(response.context['form'], AuthenticationForm),
'Login form is not an AuthenticationForm')
def test_security_check(self, password='password'):
login_url = reverse('django.contrib.auth.views.login')
# Those URLs should not pass the security check
for bad_url in ('http://example.com',
'https://example.com',
'ftp://exampel.com',
'//example.com'):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': urllib.quote(bad_url)
}
response = self.client.post(nasty_url, {
'username': 'testclient',
'password': password,
}
)
self.assertEquals(response.status_code, 302)
self.assertFalse(bad_url in response['Location'],
"%s should be blocked" % bad_url)
# These URLs *should* still pass the security check
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://exampel.com',
'view/?param=//example.com',
'https:///',
'//testserver/'):
safe_url = '%(url)s?%(next)s=%(good_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'good_url': urllib.quote(good_url)
}
response = self.client.post(safe_url, {
'username': 'testclient',
'password': password,
}
)
self.assertEquals(response.status_code, 302)
self.assertTrue(good_url in response['Location'],
"%s should be allowed" % good_url)
class LoginURLSettings(AuthViewsTestCase):
urls = 'django.contrib.auth.tests.urls'
def setUp(self):
super(LoginURLSettings, self).setUp()
self.old_LOGIN_URL = settings.LOGIN_URL
def tearDown(self):
super(LoginURLSettings, self).tearDown()
settings.LOGIN_URL = self.old_LOGIN_URL
def get_login_required_url(self, login_url):
settings.LOGIN_URL = login_url
response = self.client.get('/login_required/')
self.assertEquals(response.status_code, 302)
return response['Location']
def test_standard_login_url(self):
login_url = '/login/'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = '/login_required/'
self.assertEqual(login_required_url,
'http://testserver%s?%s' % (login_url, querystring.urlencode('/')))
def test_remote_login_url(self):
login_url = 'http://remote.example.com/login'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = 'http://testserver/login_required/'
self.assertEqual(login_required_url,
'%s?%s' % (login_url, querystring.urlencode('/')))
def test_https_login_url(self):
login_url = 'https:///login/'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = 'http://testserver/login_required/'
self.assertEqual(login_required_url,
'%s?%s' % (login_url, querystring.urlencode('/')))
def test_login_url_with_querystring(self):
login_url = '/login/?pretty=1'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('pretty=1', mutable=True)
querystring['next'] = '/login_required/'
self.assertEqual(login_required_url, 'http://testserver/login/?%s' %
querystring.urlencode('/'))
def test_remote_login_url_with_next_querystring(self):
login_url = 'http://remote.example.com/login/'
login_required_url = self.get_login_required_url('%s?next=/default/' %
login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = 'http://testserver/login_required/'
self.assertEqual(login_required_url, '%s?%s' % (login_url,
querystring.urlencode('/')))
class LogoutTest(AuthViewsTestCase):
urls = 'django.contrib.auth.tests.urls'
def confirm_logged_out(self):
self.assert_(SESSION_KEY not in self.client.session)
def test_logout_default(self):
"Logout without next_page option renders the default template"
self.login()
response = self.client.get('/logout/')
self.assertEquals(200, response.status_code)
self.assert_('Logged out' in response.content)
self.confirm_logged_out()
def test_14377(self):
# Bug 14377
self.login()
response = self.client.get('/logout/')
self.assertTrue('site' in response.context)
def test_logout_with_next_page_specified(self):
"Logout with next_page option given redirects to specified resource"
self.login()
response = self.client.get('/logout/next_page/')
self.assertEqual(response.status_code, 302)
self.assert_(response['Location'].endswith('/somewhere/'))
self.confirm_logged_out()
def test_logout_with_redirect_argument(self):
"Logout with query string redirects to specified resource"
self.login()
response = self.client.get('/logout/?next=/login/')
self.assertEqual(response.status_code, 302)
self.assert_(response['Location'].endswith('/login/'))
self.confirm_logged_out()
def test_logout_with_custom_redirect_argument(self):
"Logout with custom query string redirects to specified resource"
self.login()
response = self.client.get('/logout/custom_query/?follow=/somewhere/')
self.assertEqual(response.status_code, 302)
self.assert_(response['Location'].endswith('/somewhere/'))
self.confirm_logged_out()
| bdelliott/wordgame | web/django/contrib/auth/tests/views.py | Python | mit | 15,291 |
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
import logging
import logging.config
import logging.handlers
import os
import warnings
from pprint import pformat
import yaml
from flask import has_request_context, request, session
from indico.core.config import config
from indico.web.util import get_request_info, get_request_user
class AddRequestIDFilter:
def filter(self, record):
# Add our request ID if available
record.request_id = request.id if has_request_context() else '0' * 16
return True
class AddUserIDFilter:
def filter(self, record):
user = get_request_user()[0] if has_request_context() else None
record.user_id = str(session.user.id) if user else '-'
return True
class RequestInfoFormatter(logging.Formatter):
def format(self, record):
rv = super().format(record)
info = get_request_info()
if info:
rv += '\n\n' + pformat(info)
return rv
class FormattedSubjectSMTPHandler(logging.handlers.SMTPHandler):
def getSubject(self, record):
return self.subject % record.__dict__
class BlacklistFilter(logging.Filter):
def __init__(self, names):
self.filters = [logging.Filter(name) for name in names]
def filter(self, record):
return not any(x.filter(record) for x in self.filters)
class Logger:
@classmethod
def init(cls, app):
path = config.LOGGING_CONFIG_PATH
if not path:
return
if not os.path.exists(path):
default_path = os.path.join(app.root_path, 'logging.yaml.sample')
warnings.warn('Logging config file not found; using defaults. '
'Copy {default_path} to {path} to get rid of this warning.'
.format(path=path, default_path=default_path))
path = default_path
with open(path) as f:
data = yaml.safe_load(f)
data['disable_existing_loggers'] = False
data['incremental'] = False
# Make the request ID available in all loggers
data.setdefault('filters', {})
data['filters']['_add_request_id'] = {'()': AddRequestIDFilter}
data['filters']['_add_user_id'] = {'()': AddUserIDFilter}
for handler in data['handlers'].values():
handler.setdefault('filters', [])
handler['filters'].insert(0, '_add_request_id')
handler['filters'].insert(1, '_add_user_id')
if handler['class'] == 'logging.FileHandler' and handler['filename'][0] != '/':
# Make relative paths relative to the log dir
handler['filename'] = os.path.join(config.LOG_DIR, handler['filename'])
elif handler['class'] in ('logging.handlers.SMTPHandler', 'indico.core.logger.FormattedSubjectSMTPHandler'):
# Configure email handlers with the data from the config
if not handler.get('mailhost'):
handler['mailhost'] = config.SMTP_SERVER
handler['secure'] = () if config.SMTP_USE_TLS else None # yuck, empty tuple == STARTTLS
if config.SMTP_LOGIN and config.SMTP_PASSWORD:
handler['credentials'] = (config.SMTP_LOGIN, config.SMTP_PASSWORD)
handler.setdefault('fromaddr', f'logger@{config.WORKER_NAME}')
handler.setdefault('toaddrs', [config.SUPPORT_EMAIL])
subject = ('Unexpected Exception occurred at {}: %(message)s'
if handler['class'] == 'indico.core.logger.FormattedSubjectSMTPHandler' else
'Unexpected Exception occurred at {}')
handler.setdefault('subject', subject.format(config.WORKER_NAME))
for formatter in data['formatters'].values():
# Make adding request info to log entries less ugly
if formatter.pop('append_request_info', False):
assert '()' not in formatter
formatter['()'] = RequestInfoFormatter
# Enable the database logger for our db_log util
if config.DB_LOG:
data['loggers']['indico._db'] = {'level': 'DEBUG', 'propagate': False, 'handlers': ['_db']}
data['handlers']['_db'] = {'class': 'logging.handlers.SocketHandler', 'host': '127.0.0.1', 'port': 9020}
# If customization debugging is enabled, ensure we get the debug log messages from it
if config.CUSTOMIZATION_DEBUG and config.CUSTOMIZATION_DIR:
data['loggers'].setdefault('indico.customization', {})['level'] = 'DEBUG'
logging.config.dictConfig(data)
@classmethod
def get(cls, name=None):
"""Get a logger with the given name.
This behaves pretty much like `logging.getLogger`, except for
prefixing any logger name with ``indico.`` (if not already
present).
"""
if name is None:
name = 'indico'
elif name != 'indico' and not name.startswith('indico.'):
name = 'indico.' + name
return logging.getLogger(name)
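# Usage sketch (assumes Logger.init(app) has already configured logging):
#   logger = Logger.get('events')   # same as logging.getLogger('indico.events')
#   logger.info('event created')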
| pferreir/indico | indico/core/logger.py | Python | mit | 5,242 |
import xmlrpclib
import sys
sys.path.append('../plagiabot')
from plagiabot_config import ithenticate_user, ithenticate_password
from flup.server.fcgi import WSGIServer
from cgi import parse_qs, escape
#cgitb.enable()
def get_view_url(report_id):
    try:
        server_i = xmlrpclib.ServerProxy("https://api.ithenticate.com/rpc")
        login_response = server_i.login({"username": ithenticate_user, "password": ithenticate_password})
        assert (login_response['status'] == 200)
        sid = login_response['sid']
        report = server_i.report.get({'id': report_id, 'sid': sid})
        return report['view_only_url']
    except xmlrpclib.ProtocolError:
        # protocol-level failure talking to the iThenticate API
        return ';-('
    except Exception:
        # login or report lookup failed
        return ';('
def app(environ, start_response):
start_response('200 OK', [('Content-Type', 'text/html')])
q=parse_qs(environ['QUERY_STRING'])
yield '<h1>Redirecting to similarity report...</h1>'
if 'rid' in q and len(q['rid']) > 0:
url=get_view_url(q['rid'][0])
if 'http' in url:
#yield '<br>To get to report please follow <a href="%s">%s</a>.'%(url, url)
yield '<script>window.location.href="%s";</script>'%url
else:
yield url
WSGIServer(app).run()
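# Example request (sketch): hitting this FastCGI endpoint with ?rid=<report id>
# redirects the browser to the view-only URL of that iThenticate report; the
# report id below is hypothetical.
#   GET /ithenticate.py?rid=12345  ->  302 to the view-only report URL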
| Jobava/plagiabot | ithenticate.py | Python | mit | 1,465 |
# -*- coding: utf-8 -*-
import re
from os.path import splitext
from vilya.libs.consts import IS_GENERATED, MINIFIED
PEG_REGEX = re.compile(
'^(?:[^\/]|\/[^\*])*\/\*(?:[^\*]|\*[^\/])*Generated by PEG.js', re.DOTALL)
GENERATED_REGEX = re.compile('^\/\/ Generated by ', re.DOTALL)
class Generated(object):
def __init__(self, name, data):
self.name = name
self.ext_name = splitext(self.name)[1]
self._data = data
@classmethod
def is_generated(cls, name, data):
return cls(name, data)._is_generated
@property
def data(self):
if hasattr(self, 'real_data'):
return self.real_data
self.real_data = self._data() if callable(self._data) else self._data
return self.real_data
@property
def lines(self):
return self.data and self.data.split("\n", -1) or []
@property
def _is_generated(self):
return self.name == "Gemfile.lock" \
or self.name.endswith(MINIFIED) \
or self.is_minified_javascript \
or self.is_compiled_coffeescript \
or self.is_xcode_project_file
# or self.is_generated_net_docfile \
# or self.is_generated_parser
@property
def is_xcode_project_file(self):
"""
Internal: Is the blob an XCode project file?
Generated if the file extension is an XCode project
file extension.
Returns True of False.
"""
return self.ext_name in IS_GENERATED
@property
def is_minified_javascript(self):
"""
Internal: Is the blob minified JS?
Consider JS minified if the average line length is
greater then 100c.
Returns True or False.
"""
if self.ext_name != '.js':
return False
lines = self.lines
if lines:
return reduce(lambda x, y: x + len(y), lines, 0) / len(lines) > 100
return False
@property
def is_compiled_coffeescript(self):
"""
Internal: Is the blob of JS generated by CoffeeScript?
CoffeScript is meant to output JS that would be difficult to
tell if it was generated or not. Look for a number of patterns
output by the CS compiler.
Return True or False
"""
if self.ext_name != ".js":
return False
lines = self.lines
# CoffeeScript generated by > 1.2 include a comment on the first line
if len(lines) > 0 \
and GENERATED_REGEX.search(lines[0]):
return True
if len(lines) < 3:
return False
if len(lines) > 2 \
and lines[0] == '(function() {' \
and lines[-2] == '}).call(this);' \
and lines[-1] == '':
# First line is module closure opening
# Second to last line closes module closure
# Last line is blank
score = 0
count_keys = lambda r, s: len(re.compile(r, re.DOTALL).findall(s))
for line in lines:
if re.compile('var ', re.DOTALL).search(line):
# Underscored temp vars are likely to be Coffee
score += 1 * count_keys(
'(_fn|_i|_len|_ref|_results)', line)
# bind and extend functions are very Coffee specific
score += 3 * count_keys(
'(__bind|__extends|__hasProp|__indexOf|__slice)', line)
return score > 3
return False
@property
def is_generated_net_docfile(self):
"""
Internal: Is this a generated documentation file for a .NET assembly?
.NET developers often check in the XML Intellisense file along with an
assembly - however, these don't have a special extension, so we have to
dig into the contents to determine if it's a docfile. Luckily, these
files are extremely structured, so recognizing them is easy.
Returns True or False
return false unless extname.downcase == ".xml"
return false unless lines.count > 3
.NET Docfiles always open with <doc> and their first tag is an
<assembly> tag
"""
lines = self.lines
if len(lines) < 5:
return False
return '<doc>' in lines[1] \
and '<assembly>' in lines[2] \
and '</doc>' in lines[-2]
@property
def is_generated_parser(self):
"""
Internal: Is the blob of JS a parser generated by PEG.js?
PEG.js-generated parsers are not meant to be consumed by humans.
Return True or False
"""
if self.ext_name != ".js":
return False
# PEG.js-generated parsers include a comment near the top of the file
# that marks them as such.
lines = self.lines
if len(lines) < 5:
return False
if PEG_REGEX.search(''.join(lines[0:5])):
return True
return False
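# Usage sketch (hypothetical blob): a first-line "Generated by" banner is
# enough for the compiled-CoffeeScript heuristic above to flag a .js file.
#   blob = '// Generated by CoffeeScript 1.12.7\n(function() {\n}).call(this);\n'
#   Generated.is_generated('app.js', blob)  # True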
| xtao/code | vilya/libs/generated.py | Python | bsd-3-clause | 5,041 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-02-20 14:27
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
replaces = [('plugin_anchor_menu', '0001_initial'), ('plugin_anchor_menu', '0002_dropdownanchormenupluginmodel')]
initial = True
dependencies = [
('cms', '0016_auto_20160608_1535'),
]
operations = [
migrations.CreateModel(
name='AnchorPluginModel',
fields=[
('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='plugin_anchor_menu_anchorpluginmodel', serialize=False, to='cms.CMSPlugin')),
('title', models.CharField(max_length=254, verbose_name='Title')),
('slug', models.SlugField(max_length=255, verbose_name='Slug')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='DropDownAnchorMenuPluginModel',
fields=[
('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='plugin_anchor_menu_dropdownanchormenupluginmodel', serialize=False, to='cms.CMSPlugin')),
('menu_id', models.SlugField(default='anchor_menu', max_length=254, verbose_name='The internal menu ID')),
('link_type', models.CharField(choices=[('h', "Add '#anchor' to browser addess."), ('s', "Don't add '#anchor' to browser addess.")], default='s', max_length=1, verbose_name='Link type')),
('first_label', models.CharField(blank=True, default='Please select', help_text='Label for the first option. (The first anchor will be used, if empty)', max_length=254, null=True, verbose_name='First label')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
| jedie/django-cms-tools | django_cms_tools/plugin_anchor_menu/migrations/0001_squashed_0002_dropdownanchormenupluginmodel.py | Python | gpl-3.0 | 2,156 |
import sys
import math
positions = []
current = int(raw_input("Enter starting point: "))
direction = int(raw_input("Enter starting direction (0=decreasing, 1=increasing): "))
while True: #Gather the input
n = raw_input("Input integer position (inter nothing to end input): ")
if n == "":
break
n=int(n)
positions.append(n)
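# Worked example (hypothetical run): starting at 53 and moving upward with
# pending positions 98, 183, 37, 122, 14, 124, 65, 67, the selection loop
# below visits 65, 67, 98, 122, 124, 183, then reverses and serves 37 and 14.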
#Start by printing the starting point
print(str(current) + ", ")
# Go though the entire list and print the shortest path
# This is n^2 but of a small list
while len(positions) > 0:
diff=sys.maxint
index=0
i=0
for pos in positions:
        if diff > math.fabs(pos-current):
            # only consider positions that lie along the current travel direction
            if (current <= pos and direction == 1) or (current >= pos and direction == 0):
                diff = math.fabs(pos-current)
                index = i
i=i+1
    if diff == sys.maxint:
        # nothing remains in the current direction; reverse
        direction = 1 - direction
else:
current=positions.pop(index)
print(str(current) + ", ")
raw_input("Press enter to continue") | dberliner/CS423_test_help | elevator_SCAN.py | Python | mit | 891 |
from __future__ import absolute_import
import sys
import warnings
from collections import defaultdict
import numpy as np
from holoviews.core.util import isscalar, unique_iterator, unique_array, pd
from holoviews.core.data import Dataset, Interface, MultiInterface
from holoviews.core.data.interface import DataError
from holoviews.core.data import PandasInterface
from holoviews.core.data.spatialpandas import get_value_array
from holoviews.core.dimension import dimension_name
from holoviews.element import Path
from ..util import geom_to_array, geom_types, geom_length
from .geom_dict import geom_from_dict
class GeoPandasInterface(MultiInterface):
types = ()
datatype = 'geodataframe'
multi = True
@classmethod
def loaded(cls):
return 'geopandas' in sys.modules
@classmethod
def applies(cls, obj):
if not cls.loaded():
return False
from geopandas import GeoDataFrame, GeoSeries
return isinstance(obj, (GeoDataFrame, GeoSeries))
@classmethod
def geo_column(cls, data):
try:
return data.geometry.name
except AttributeError:
if len(data):
raise ValueError('No geometry column found in geopandas.DataFrame, '
'use the PandasInterface instead.')
return None
@classmethod
def init(cls, eltype, data, kdims, vdims):
import pandas as pd
from geopandas import GeoDataFrame, GeoSeries
if kdims is None:
kdims = eltype.kdims
if isinstance(data, GeoSeries):
data = data.to_frame()
if isinstance(data, list):
if all(isinstance(d, geom_types) for d in data):
data = [{'geometry': d} for d in data]
if all(isinstance(d, dict) and 'geometry' in d and isinstance(d['geometry'], geom_types)
for d in data):
data = GeoDataFrame(data)
if not isinstance(data, GeoDataFrame):
vdims = vdims or eltype.vdims
data = from_multi(eltype, data, kdims, vdims)
elif not isinstance(data, GeoDataFrame):
raise ValueError("GeoPandasInterface only support geopandas "
"DataFrames not %s." % type(data))
elif 'geometry' not in data:
cls.geo_column(data)
if vdims is None:
vdims = [col for col in data.columns if not isinstance(data[col], GeoSeries)]
index_names = data.index.names if isinstance(data, pd.DataFrame) else [data.index.name]
if index_names == [None]:
index_names = ['index']
for kd in kdims+vdims:
kd = dimension_name(kd)
if kd in data.columns:
continue
if any(kd == ('index' if name is None else name)
for name in index_names):
data = data.reset_index()
break
try:
shp_types = {gt[5:] if 'Multi' in gt else gt for gt in data.geom_type}
except:
shp_types = []
if len(shp_types) > 1:
raise DataError('The GeopandasInterface can only read dataframes which '
'share a common geometry type, found %s types.' % shp_types,
cls)
return data, {'kdims': kdims, 'vdims': vdims}, {}
@classmethod
def validate(cls, dataset, vdims=True):
dim_types = 'key' if vdims else 'all'
geom_dims = cls.geom_dims(dataset)
if len(geom_dims) > 0 and len(geom_dims) != 2:
raise DataError('Expected %s instance to declare two key '
'dimensions corresponding to the geometry '
'coordinates but %d dimensions were found '
'which did not refer to any columns.'
% (type(dataset).__name__, len(geom_dims)), cls)
not_found = [d.name for d in dataset.dimensions(dim_types)
if d not in geom_dims and d.name not in dataset.data]
if not_found:
raise DataError("Supplied data does not contain specified "
"dimensions, the following dimensions were "
"not found: %s" % repr(not_found), cls)
@classmethod
def dtype(cls, dataset, dimension):
name = dataset.get_dimension(dimension, strict=True).name
if name not in dataset.data:
return np.dtype('float') # Geometry dimension
return dataset.data[name].dtype
@classmethod
def has_holes(cls, dataset):
from shapely.geometry import Polygon, MultiPolygon
col = cls.geo_column(dataset.data)
for geom in dataset.data[col]:
if isinstance(geom, Polygon) and geom.interiors:
return True
elif isinstance(geom, MultiPolygon):
for g in geom:
if isinstance(g, Polygon) and g.interiors:
return True
return False
@classmethod
def holes(cls, dataset):
from shapely.geometry import Polygon, MultiPolygon
holes = []
col = cls.geo_column(dataset.data)
for geom in dataset.data[col]:
if isinstance(geom, Polygon) and geom.interiors:
holes.append([[geom_to_array(h) for h in geom.interiors]])
elif isinstance(geom, MultiPolygon):
holes += [[[geom_to_array(h) for h in g.interiors] for g in geom]]
else:
holes.append([[]])
return holes
@classmethod
def select(cls, dataset, selection_mask=None, **selection):
if cls.geom_dims(dataset):
df = cls.shape_mask(dataset, selection)
else:
df = dataset.data
if not selection:
return df
elif selection_mask is None:
selection_mask = cls.select_mask(dataset, selection)
indexed = cls.indexed(dataset, selection)
df = df.iloc[selection_mask]
if indexed and len(df) == 1 and len(dataset.vdims) == 1:
return df[dataset.vdims[0].name].iloc[0]
return df
@classmethod
def shape_mask(cls, dataset, selection):
xdim, ydim = cls.geom_dims(dataset)
xsel = selection.pop(xdim.name, None)
ysel = selection.pop(ydim.name, None)
if xsel is None and ysel is None:
return dataset.data
from shapely.geometry import box
if xsel is None:
x0, x1 = cls.range(dataset, xdim)
elif isinstance(xsel, slice):
x0, x1 = xsel.start, xsel.stop
elif isinstance(xsel, tuple):
x0, x1 = xsel
else:
raise ValueError("Only slicing is supported on geometries, %s "
"selection is of type %s."
% (xdim, type(xsel).__name__))
if ysel is None:
y0, y1 = cls.range(dataset, ydim)
elif isinstance(ysel, slice):
y0, y1 = ysel.start, ysel.stop
elif isinstance(ysel, tuple):
y0, y1 = ysel
else:
raise ValueError("Only slicing is supported on geometries, %s "
"selection is of type %s."
% (ydim, type(ysel).__name__))
bounds = box(x0, y0, x1, y1)
col = cls.geo_column(dataset.data)
df = dataset.data.copy()
df[col] = df[col].intersection(bounds)
return df[df[col].area > 0]
@classmethod
def select_mask(cls, dataset, selection):
        mask = np.ones(len(dataset.data), dtype=bool)
for dim, k in selection.items():
if isinstance(k, tuple):
k = slice(*k)
arr = dataset.data[dim].values
if isinstance(k, slice):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', r'invalid value encountered')
if k.start is not None:
mask &= k.start <= arr
if k.stop is not None:
mask &= arr < k.stop
elif isinstance(k, (set, list)):
iter_slcs = []
for ik in k:
with warnings.catch_warnings():
warnings.filterwarnings('ignore', r'invalid value encountered')
iter_slcs.append(arr == ik)
mask &= np.logical_or.reduce(iter_slcs)
elif callable(k):
mask &= k(arr)
else:
index_mask = arr == k
if dataset.ndims == 1 and np.sum(index_mask) == 0:
data_index = np.argmin(np.abs(arr - k))
                    mask = np.zeros(len(dataset), dtype=bool)
mask[data_index] = True
else:
mask &= index_mask
return mask
@classmethod
def geom_dims(cls, dataset):
return [d for d in dataset.kdims + dataset.vdims
if d.name not in dataset.data]
@classmethod
def dimension_type(cls, dataset, dim):
geom_dims = cls.geom_dims(dataset)
if dim in geom_dims:
return float
elif len(dataset.data):
return type(dataset.data[dim.name].iloc[0])
else:
return float
@classmethod
def isscalar(cls, dataset, dim, per_geom=False):
"""
Tests if dimension is scalar in each subpath.
"""
dim = dataset.get_dimension(dim)
geom_dims = cls.geom_dims(dataset)
if dim in geom_dims:
return False
elif per_geom:
return all(isscalar(v) or len(list(unique_array(v))) == 1
for v in dataset.data[dim.name])
dim = dataset.get_dimension(dim)
return len(dataset.data[dim.name].unique()) == 1
@classmethod
def range(cls, dataset, dim):
dim = dataset.get_dimension(dim)
geom_dims = cls.geom_dims(dataset)
if dim in geom_dims:
col = cls.geo_column(dataset.data)
idx = geom_dims.index(dim)
bounds = dataset.data[col].bounds
if idx == 0:
return bounds.minx.min(), bounds.maxx.max()
else:
return bounds.miny.min(), bounds.maxy.max()
else:
vals = dataset.data[dim.name]
return vals.min(), vals.max()
@classmethod
def aggregate(cls, columns, dimensions, function, **kwargs):
raise NotImplementedError
@classmethod
def add_dimension(cls, dataset, dimension, dim_pos, values, vdim):
data = dataset.data.copy()
geom_col = cls.geo_column(dataset.data)
if dim_pos >= list(data.columns).index(geom_col):
dim_pos -= 1
if dimension.name not in data:
data.insert(dim_pos, dimension.name, values)
return data
@classmethod
def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs):
geo_dims = cls.geom_dims(dataset)
if any(d in geo_dims for d in dimensions):
raise DataError("GeoPandasInterface does not allow grouping "
"by geometry dimension.", cls)
return PandasInterface.groupby(dataset, dimensions, container_type, group_type, **kwargs)
@classmethod
def reindex(cls, dataset, kdims=None, vdims=None):
return dataset.data
@classmethod
def sample(cls, columns, samples=[]):
raise NotImplementedError
@classmethod
def sort(cls, dataset, by=[], reverse=False):
geo_dims = cls.geom_dims(dataset)
if any(d in geo_dims for d in by):
raise DataError("SpatialPandasInterface does not allow sorting "
"by geometry dimension.", cls)
return PandasInterface.sort(dataset, by, reverse)
@classmethod
def shape(cls, dataset):
return (cls.length(dataset), len(dataset.dimensions()))
@classmethod
def length(cls, dataset):
geom_type = cls.geom_type(dataset)
if geom_type != 'Point':
return len(dataset.data)
return sum([geom_length(g) for g in dataset.data.geometry])
@classmethod
def nonzero(cls, dataset):
return bool(cls.length(dataset))
@classmethod
def redim(cls, dataset, dimensions):
return PandasInterface.redim(dataset, dimensions)
@classmethod
def values(cls, dataset, dimension, expanded=True, flat=True, compute=True, keep_index=False):
dimension = dataset.get_dimension(dimension)
geom_dims = dataset.interface.geom_dims(dataset)
data = dataset.data
isgeom = (dimension in geom_dims)
geom_col = cls.geo_column(dataset.data)
is_points = cls.geom_type(dataset) == 'Point'
if not len(data):
dtype = float if isgeom else dataset.data[dimension.name].dtype
return np.array([], dtype=dtype)
col = cls.geo_column(dataset.data)
if isgeom and keep_index:
return data[col]
elif not isgeom:
return get_value_array(data, dimension, expanded, keep_index,
geom_col, is_points, geom_length)
column = data[dimension.name]
if not expanded or keep_index or not len(data):
return column if keep_index else column.values
else:
arrays = []
for i, geom in enumerate(data[col]):
length = geom_length(geom)
arrays.append(np.full(length, column.iloc[i]))
return np.concatenate(arrays) if len(arrays) > 1 else arrays[0]
values = []
geom_type = data.geom_type.iloc[0]
ds = dataset.clone(data.iloc[0].to_dict(), datatype=['geom_dictionary'])
for i, row in data.iterrows():
ds.data = row.to_dict()
values.append(ds.interface.values(ds, dimension))
if 'Point' not in geom_type and expanded:
                values.append([np.nan])
values = values if 'Point' in geom_type or not expanded else values[:-1]
if not values:
return np.array([])
elif not expanded:
array = np.empty(len(values), dtype=object)
array[:] = values
return array
elif len(values) == 1:
return values[0]
else:
return np.concatenate(values)
@classmethod
def iloc(cls, dataset, index):
from geopandas import GeoSeries
from shapely.geometry import MultiPoint
rows, cols = index
geom_dims = cls.geom_dims(dataset)
geom_col = cls.geo_column(dataset.data)
scalar = False
columns = list(dataset.data.columns)
if isinstance(cols, slice):
cols = [d.name for d in dataset.dimensions()][cols]
elif np.isscalar(cols):
scalar = np.isscalar(rows)
cols = [dataset.get_dimension(cols).name]
else:
cols = [dataset.get_dimension(d).name for d in index[1]]
if not all(d in cols for d in geom_dims):
raise DataError("Cannot index a dimension which is part of the "
"geometry column of a spatialpandas DataFrame.", cls)
cols = list(unique_iterator([
columns.index(geom_col) if c in geom_dims else columns.index(c) for c in cols
]))
geom_type = dataset.data[geom_col].geom_type.iloc[0]
if geom_type != 'MultiPoint':
if scalar:
return dataset.data.iloc[rows[0], cols[0]]
elif isscalar(rows):
rows = [rows]
return dataset.data.iloc[rows, cols]
geoms = dataset.data[geom_col]
count = 0
new_geoms, indexes = [], []
for i, geom in enumerate(geoms):
length = len(geom)
if np.isscalar(rows):
if count <= rows < (count+length):
new_geoms.append(geom[rows-count])
indexes.append(i)
break
elif isinstance(rows, slice):
if rows.start is not None and rows.start > (count+length):
continue
elif rows.stop is not None and rows.stop < count:
break
start = None if rows.start is None else max(rows.start - count, 0)
stop = None if rows.stop is None else min(rows.stop - count, length)
if rows.step is not None:
                    dataset.param.warning(".iloc step slicing currently not supported for "
                                          "the multi-tabular data format.")
indexes.append(i)
new_geoms.append(geom[start:stop])
elif isinstance(rows, (list, set)):
sub_rows = [(r-count) for r in rows if count <= r < (count+length)]
if not sub_rows:
continue
indexes.append(i)
new_geoms.append(MultiPoint([geom[r] for r in sub_rows]))
count += length
new = dataset.data.iloc[indexes].copy()
new[geom_col] = GeoSeries(new_geoms)
return new
@classmethod
def split(cls, dataset, start, end, datatype, **kwargs):
objs = []
xdim, ydim = dataset.kdims[:2]
if not len(dataset.data):
return []
row = dataset.data.iloc[0]
col = cls.geo_column(dataset.data)
arr = geom_to_array(row[col])
d = {(xdim.name, ydim.name): arr}
d.update({vd.name: row[vd.name] for vd in dataset.vdims})
geom_type = cls.geom_type(dataset)
ds = dataset.clone([d], datatype=['multitabular'])
for i, row in dataset.data.iterrows():
if datatype == 'geom':
objs.append(row[col])
continue
geom = row[col]
gt = geom_type or get_geom_type(geom)
arr = geom_to_array(geom)
d = {xdim.name: arr[:, 0], ydim.name: arr[:, 1]}
d.update({vd.name: row[vd.name] for vd in dataset.vdims})
ds.data = [d]
if datatype == 'array':
obj = ds.array(**kwargs)
elif datatype == 'dataframe':
obj = ds.dframe(**kwargs)
elif datatype in ('columns', 'dictionary'):
d['geom_type'] = gt
obj = d
elif datatype is None:
obj = ds.clone()
else:
raise ValueError("%s datatype not support" % datatype)
objs.append(obj)
return objs
def get_geom_type(geom):
"""Returns the HoloViews geometry type.
Args:
geom: A shapely geometry
Returns:
A string representing type of the geometry.
"""
    from shapely.geometry import (
        Point, LineString, LinearRing, Polygon, MultiPoint, MultiPolygon,
        MultiLineString
    )
    if isinstance(geom, (Point, MultiPoint)):
        return 'Point'
    # LinearRing subclasses LineString, so it must be tested first
    elif isinstance(geom, LinearRing):
        return 'Ring'
    elif isinstance(geom, (LineString, MultiLineString)):
        return 'Line'
    elif isinstance(geom, (Polygon, MultiPolygon)):
        return 'Polygon'
def to_geopandas(data, xdim, ydim, columns=[], geom='point'):
"""Converts list of dictionary format geometries to spatialpandas line geometries.
Args:
data: List of dictionaries representing individual geometries
xdim: Name of x-coordinates column
ydim: Name of y-coordinates column
ring: Whether the data represents a closed ring
Returns:
A spatialpandas.GeoDataFrame version of the data
"""
from geopandas import GeoDataFrame
from shapely.geometry import (
Point, LineString, Polygon, MultiPoint, MultiPolygon, MultiLineString
)
poly = any('holes' in d for d in data) or geom == 'Polygon'
if poly:
single_type, multi_type = Polygon, MultiPolygon
elif geom == 'Line':
single_type, multi_type = LineString, MultiLineString
else:
single_type, multi_type = Point, MultiPoint
converted = defaultdict(list)
for geom_dict in data:
geom_dict = dict(geom_dict)
geom = geom_from_dict(geom_dict, xdim, ydim, single_type, multi_type)
for c, v in geom_dict.items():
converted[c].append(v)
converted['geometry'].append(geom)
return GeoDataFrame(converted, columns=['geometry']+columns)
def from_multi(eltype, data, kdims, vdims):
"""Converts list formats into geopandas.GeoDataFrame.
Args:
eltype: Element type to convert
data: The original data
kdims: The declared key dimensions
vdims: The declared value dimensions
Returns:
A GeoDataFrame containing the data in the list based format.
"""
from geopandas import GeoDataFrame
new_data = []
types = []
xname, yname = (kd.name for kd in kdims[:2])
for d in data:
types.append(type(d))
if isinstance(d, dict):
d = {k: v if isscalar(v) else np.asarray(v) for k, v in d.items()}
new_data.append(d)
continue
new_el = eltype(d, kdims, vdims)
if new_el.interface is GeoPandasInterface:
types[-1] = GeoDataFrame
new_data.append(new_el.data)
continue
new_dict = {}
for d in new_el.dimensions():
if d in (xname, yname):
scalar = False
else:
scalar = new_el.interface.isscalar(new_el, d)
vals = new_el.dimension_values(d, not scalar)
new_dict[d.name] = vals[0] if scalar else vals
new_data.append(new_dict)
if len(set(types)) > 1:
raise DataError('Mixed types not supported')
if new_data and types[0] is GeoDataFrame:
data = pd.concat(new_data)
else:
columns = [d.name for d in kdims+vdims if d not in (xname, yname)]
geom = GeoPandasInterface.geom_type(eltype)
if not len(data):
return GeoDataFrame([], columns=['geometry']+columns)
data = to_geopandas(new_data, xname, yname, columns, geom)
return data
Interface.register(GeoPandasInterface)
Dataset.datatype = Dataset.datatype+['geodataframe']
Path.datatype = Path.datatype+['geodataframe']
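# Usage sketch (assumes geopandas with its bundled sample data is available;
# the dataset name and columns below come from geopandas, not this module):
#   import geopandas as gpd
#   world = gpd.read_file(gpd.datasets.get_path('naturalearth_lowres'))
#   paths = Path(world, kdims=['Longitude', 'Latitude'], vdims=['name'])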
| ioam/geoviews | geoviews/data/geopandas.py | Python | bsd-3-clause | 22,492 |
import errno
import os
import pytest
from wal_e.worker import prefetch
from wal_e import worker
@pytest.fixture
def pd(tmpdir):
d = prefetch.Dirs(unicode(tmpdir))
return d
@pytest.fixture
def seg():
return worker.WalSegment('0' * 8 * 3)
@pytest.fixture
def raise_eperm():
def raiser(*args, **kwargs):
e = OSError('bogus EPERM')
e.errno = errno.EPERM
raise e
return raiser
def test_double_create(pd, seg):
pd.create(seg)
pd.create(seg)
def test_atomic_download(pd, seg, tmpdir):
assert not pd.is_running(seg)
pd.create(seg)
assert pd.is_running(seg)
with pd.download(seg) as ad:
s = 'hello'
ad.tf.write(s)
ad.tf.flush()
assert pd.running_size(seg) == len(s)
assert pd.contains(seg)
assert not pd.is_running(seg)
promote_target = tmpdir.join('another-spot')
pd.promote(seg, unicode(promote_target))
pd.clear()
assert not pd.contains(seg)
def test_atomic_download_failure(pd, seg):
"Ensure a raised exception doesn't move WAL into place"
pd.create(seg)
e = Exception('Anything')
with pytest.raises(Exception) as err:
with pd.download(seg):
raise e
assert err.value is e
assert not pd.is_running(seg)
assert not pd.contains(seg)
def test_cleanup_running(pd, seg):
pd.create(seg)
assert pd.is_running(seg)
nxt = seg.future_segment_stream().next()
pd.clear_except([nxt])
assert not pd.is_running(seg)
def test_cleanup_promoted(pd, seg):
pd.create(seg)
assert pd.is_running(seg)
with pd.download(seg):
pass
assert not pd.is_running(seg)
assert pd.contains(seg)
nxt = seg.future_segment_stream().next()
pd.clear_except([nxt])
assert not pd.contains(seg)
def test_running_size_error(pd, seg, monkeypatch, raise_eperm):
pd.create(seg)
monkeypatch.setattr(os, 'listdir', raise_eperm)
with pytest.raises(EnvironmentError):
pd.running_size(seg)
def test_create_error(pd, seg, monkeypatch, raise_eperm):
monkeypatch.setattr(os, 'makedirs', raise_eperm)
assert not pd.create(seg)
def test_clear_error(pd, seg, monkeypatch, raise_eperm):
pd.create(seg)
monkeypatch.setattr(os, 'rmdir', raise_eperm)
pd.clear()
| intoximeters/wal-e | tests/test_prefetch.py | Python | bsd-3-clause | 2,302 |
# Performs raw decompression of various compression algorithms (currently, only deflate).
import os
import zlib
import lzma
import struct
import binwalk.core.compat
import binwalk.core.common
from binwalk.core.module import Option, Kwarg, Module
class LZMAHeader(object):
def __init__(self, **kwargs):
for (k,v) in binwalk.core.compat.iterator(kwargs):
setattr(self, k, v)
class LZMA(object):
DESCRIPTION = "Raw LZMA compression stream"
COMMON_PROPERTIES = [0x5D, 0x6E]
MAX_PROP = ((4 * 5 + 4) * 9 + 8)
BLOCK_SIZE = 32*1024
def __init__(self, module):
self.module = module
self.properties = None
self.build_properties()
self.build_dictionaries()
self.build_headers()
# Add an extraction rule
if self.module.extractor.enabled:
self.module.extractor.add_rule(regex='^%s' % self.DESCRIPTION.lower(), extension="7z", cmd=self.extractor)
def extractor(self, file_name):
# Open and read the file containing the raw compressed data.
# This is not terribly efficient, especially for large files...
compressed_data = binwalk.core.common.BlockFile(file_name).read()
# Re-run self.decompress to detect the properties for this compressed data (stored in self.properties)
if self.decompress(compressed_data[:self.BLOCK_SIZE]):
# Build an LZMA header on top of the raw compressed data and write it back to disk.
# Header consists of the detected properties values, the largest possible dictionary size,
# and a fake output file size field.
header = chr(self.properties) + self.dictionaries[-1] + ("\xFF" * 8)
binwalk.core.common.BlockFile(file_name, "wb").write(header + compressed_data)
# Try to extract it with all the normal lzma extractors until one works
for exrule in self.module.extractor.match("lzma compressed data"):
if self.module.extractor.execute(exrule['cmd'], file_name) == True:
break
def build_property(self, pb, lp, lc):
prop = (((pb * 5) + lp) * 9) + lc
if prop > self.MAX_PROP:
return None
return int(prop)
def parse_property(self, prop):
prop = int(ord(prop))
if prop > self.MAX_PROP:
return None
        # floor division keeps these integer-valued on Python 3 as well
        pb = prop // (9 * 5)
        prop -= pb * 9 * 5
        lp = prop // 9
        lc = prop - lp * 9
return (pb, lp, lc)
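    # Worked example (illustrative): the common property byte 0x5D decodes to
    # pb=2, lp=0, lc=3, since ((2 * 5) + 0) * 9 + 3 == 93 == 0x5D -- the
    # canonical LZMA default settings.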
def parse_header(self, header):
(pb, lp, lc) = self.parse_property(header[0])
dictionary = struct.unpack("<I", binwalk.core.compat.str2bytes(header[1:5]))[0]
return LZMAHeader(pb=pb, lp=lp, lc=lc, dictionary=dictionary)
def build_properties(self):
self.properties = set()
if self.module.partial_scan == True:
# For partial scans, only check the most common properties values
for prop in self.COMMON_PROPERTIES:
self.properties.add(chr(prop))
else:
for pb in range(0, 9):
for lp in range(0, 5):
for lc in range(0, 5):
prop = self.build_property(pb, lp, lc)
if prop is not None:
self.properties.add(chr(prop))
def build_dictionaries(self):
self.dictionaries = []
if self.module.partial_scan == True:
# For partial scans, only use the largest dictionary value
self.dictionaries.append(binwalk.core.compat.bytes2str(struct.pack("<I", 2**25)))
else:
for n in range(16, 26):
self.dictionaries.append(binwalk.core.compat.bytes2str(struct.pack("<I", 2**n)))
def build_headers(self):
self.headers = set()
for prop in self.properties:
for dictionary in self.dictionaries:
self.headers.add(prop + dictionary + ("\xFF" * 8))
def decompress(self, data):
result = None
description = None
i = 0
for header in self.headers:
i += 1
# The only acceptable exceptions are those indicating that the input data was truncated.
try:
final_data = binwalk.core.compat.str2bytes(header + data)
lzma.decompress(final_data)
result = self.parse_header(header)
break
except IOError as e:
# The Python2 module gives this error on truncated input data.
if str(e) == "unknown BUF error":
result = self.parse_header(header)
break
except Exception as e:
# The Python3 module gives this error on truncated input data.
# The inconsistency between modules is a bit worrisome.
if str(e) == "Compressed data ended before the end-of-stream marker was reached":
result = self.parse_header(header)
break
if result is not None:
self.properties = self.build_property(result.pb, result.lp, result.lc)
description = "%s, properties: 0x%.2X [pb: %d, lp: %d, lc: %d], dictionary size: %d" % (self.DESCRIPTION,
self.properties,
result.pb,
result.lp,
result.lc,
result.dictionary)
return description
class Deflate(object):
'''
Finds and extracts raw deflate compression streams.
'''
ENABLED = False
BLOCK_SIZE = 33*1024
DESCRIPTION = "Raw deflate compression stream"
def __init__(self, module):
self.module = module
# Add an extraction rule
if self.module.extractor.enabled:
self.module.extractor.add_rule(regex='^%s' % self.DESCRIPTION.lower(), extension="deflate", cmd=self.extractor)
def extractor(self, file_name):
out_file = os.path.splitext(file_name)[0]
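        # Completion sketch (assumption, not necessarily the original logic):
        # mirror the LZMA extractor above by reading the raw stream back and
        # inflating it with a raw-deflate decompressor (wbits=-15 means no
        # zlib header), writing any recovered output next to the input file.
        compressed_data = binwalk.core.common.BlockFile(file_name).read()
        try:
            decompressed = zlib.decompressobj(-15).decompress(
                binwalk.core.compat.str2bytes(compressed_data))
        except zlib.error:
            return
        binwalk.core.common.BlockFile(out_file, "wb").write(decompressed)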
def decompress(self, data):
valid = True
description = None
# Prepend data with a standard zlib header
data = "\x78\x9C" + data
# Looking for either a valid decompression, or an error indicating truncated input data
try:
zlib.decompress(binwalk.core.compat.str2bytes(data))
except zlib.error as e:
if not str(e).startswith("Error -5"):
# Bad data.
return None
return self.DESCRIPTION
class RawCompression(Module):
TITLE = 'Raw Compression'
CLI = [
Option(short='X',
long='deflate',
kwargs={'enabled' : True, 'scan_for_deflate' : True},
description='Scan for raw deflate compression streams'),
Option(short='Z',
long='lzma',
kwargs={'enabled' : True, 'scan_for_lzma' : True},
description='Scan for raw LZMA compression streams'),
Option(short='P',
long='partial',
kwargs={'partial_scan' : True},
description='Perform a superficial, but faster, scan'),
Option(short='S',
long='stop',
kwargs={'stop_on_first_hit' : True},
description='Stop after the first result'),
]
KWARGS = [
Kwarg(name='enabled', default=False),
Kwarg(name='partial_scan', default=False),
Kwarg(name='stop_on_first_hit', default=False),
Kwarg(name='scan_for_deflate', default=False),
Kwarg(name='scan_for_lzma', default=False),
]
def init(self):
self.decompressors = []
if self.scan_for_deflate:
self.decompressors.append(Deflate(self))
if self.scan_for_lzma:
self.decompressors.append(LZMA(self))
def run(self):
for fp in iter(self.next_file, None):
file_done = False
self.header()
while not file_done:
(data, dlen) = fp.read_block()
if not data:
break
for i in range(0, dlen):
for decompressor in self.decompressors:
description = decompressor.decompress(data[i:i+decompressor.BLOCK_SIZE])
if description:
self.result(description=description, file=fp, offset=fp.tell()-dlen+i)
if self.stop_on_first_hit:
file_done = True
break
if file_done:
break
            # track scan progress for this file
            self.status.completed = fp.tell() - fp.offset
self.footer()
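# Example invocation (sketch; the flag names come from the Option definitions
# in the CLI list above):
#   binwalk -X -Z --stop firmware.bin
# scans firmware.bin for raw deflate and LZMA streams, stopping at the first
# hit in each file.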
| WooyunDota/binwalk | src/binwalk/modules/compression.py | Python | mit | 9,259 |
#!/usr/bin/env python
"""Use clang-format and autopep8 when available to clean up the listed source
files."""
from __future__ import print_function
from optparse import OptionParser
import subprocess
import os
import sys
import multiprocessing
try:
from queue import Queue # python3
except ImportError:
from Queue import Queue # python2
from threading import Thread
try:
from shutil import which # python3.3 or later
except ImportError:
from distutils.spawn import find_executable as which
sys.path.append(os.path.split(sys.argv[0])[0])
import python_tools
from python_tools.reindent import Reindenter
parser = OptionParser(usage="%prog [options] [FILENAME ...]",
description="""Reformat the given C++ and Python files
(using the clang-format tool if available and
reindent.py, respectively). If the --all option is given, reformat all such
files under the current directory.
If the autopep8 tool is also available, it can be used instead of reindent.py
by giving the -a option. autopep8 is much more aggressive than reindent.py
and will fix other issues, such as use of old-style Python syntax.
""")
parser.add_option("-c", "--clang-format", dest="clang_format",
default="auto", metavar="EXE",
help="The clang-format command.")
parser.add_option("-a", dest="use_ap", action="store_true", default=False,
help="Use autopep8 rather than reindent.py for "
"Python files.")
parser.add_option("--all", dest="all_files", action="store_true",
default=False,
help="Reformat all files under current directory")
parser.add_option("--autopep8", dest="autopep8",
default="auto", metavar="EXE",
help="The autopep8 command.")
parser.add_option("-e", "--exclude", dest="exclude",
default="eigen3:config_templates", metavar="DIRS",
help="Colon-separated list of dirnames to ignore.")
parser.add_option("-v", "--verbose", dest="verbose", action="store_true",
default=False,
help="Print extra info.")
(options, args) = parser.parse_args()
if not args and not options.all_files:
parser.error("No files selected")
# Search for the formatter executables
if options.clang_format == "auto":
options.clang_format = None
for name in ["clang-format-3.4", "clang-format"]:
if which(name):
options.clang_format = name
break
if options.autopep8 == "auto":
options.autopep8 = None
for name in ["autopep8"]:
if which(name):
options.autopep8 = name
break
exclude = options.exclude.split(":")
error = None
class _Worker(Thread):
"""Thread executing tasks from a given tasks queue"""
def __init__(self, tasks):
Thread.__init__(self)
self.tasks = tasks
self.daemon = True
self.start()
def run(self):
while True:
func, args, kargs = self.tasks.get()
try:
func(*args, **kargs)
except Exception as e:
print(e)
self.tasks.task_done()
class ThreadPool:
"""Pool of threads consuming tasks from a queue"""
def __init__(self, num_threads=-1):
if num_threads == -1:
num_threads = 2 * multiprocessing.cpu_count()
# print "Creating thread pool with", num_threads
self.tasks = Queue(-1)
for _ in range(num_threads):
_Worker(self.tasks)
def add_task(self, func, *args, **kargs):
"""Add a task to the queue"""
# func(*args, **kargs)
self.tasks.put((func, args, kargs))
def wait_completion(self):
"""Wait for completion of all the tasks in the queue"""
self.tasks.join()
return error
def _do_get_files(glb, cur):
matches = []
for n in os.listdir(cur):
if n in exclude:
continue
name = os.path.join(cur, n)
if os.path.isdir(name):
if not os.path.exists(os.path.join(name, ".git")):
matches += _do_get_files(glb, name)
elif name.endswith(glb):
matches.append(name)
return matches
def _get_files(glb):
match = []
if len(args) == 0:
match = _do_get_files(glb, ".")
else:
for a in args:
if os.path.isdir(a):
match += _do_get_files(glb, a)
elif a.endswith(glb):
match.append(a)
return match
def _run(cmd):
# print " ".join(cmd)
pro = subprocess.Popen(cmd, stderr=subprocess.PIPE,
stdout=subprocess.PIPE, universal_newlines=True)
output, error = pro.communicate()
if pro.returncode != 0:
print(" ".join(cmd))
raise RuntimeError("error running " + error)
return output
def clean_cpp(path):
# skip code that isn't ours
if "dependency" in path or "/eigen3/" in path:
return
if options.clang_format:
contents = _run([options.clang_format, "--style=Google", path])
else:
contents = open(path, "r").read()
contents = contents.replace("% template", "%template")
python_tools.rewrite(path, contents)
def clean_py(path):
if options.use_ap and options.autopep8:
contents = _run([options.autopep8, "--aggressive", "--aggressive",
path])
else:
r = Reindenter(open(path))
r.run()
contents = ''.join(r.after)
if contents.find("# \\example") != -1:
contents = "#" + contents
python_tools.rewrite(path, contents)
def main():
if options.verbose:
if options.autopep8 is None:
print("autopep8 not found")
else:
print("autopep8 is `%s`" % options.autopep8)
if options.clang_format is None:
print("clang-format not found")
else:
print("clang-format is `%s`" % options.clang_format)
tp = ThreadPool()
if args:
for f in args:
if f.endswith(".py"):
tp.add_task(clean_py, f)
elif f.endswith(".h") or f.endswith(".cpp"):
tp.add_task(clean_cpp, f)
elif options.all_files:
for f in _get_files(".py"):
tp.add_task(clean_py, f)
for f in _get_files(".h") + _get_files(".cpp"):
tp.add_task(clean_cpp, f)
tp.wait_completion()
if __name__ == '__main__':
main()
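# Example invocations (sketch; file paths are illustrative):
#   ./cleanup_code.py src/foo.cpp include/foo.h    # reformat specific files
#   ./cleanup_code.py --all -a                     # everything, via autopep8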
| salilab/rmf | tools/dev_tools/cleanup_code.py | Python | apache-2.0 | 6,511 |
import pandas as pd
import numpy as np
__author__ = 'Allison MacLeay'
def load_mnist_features(n):
csv_file = 'df_save_img_everything.csv'
print 'Loading {} records from haar dataset'.format(n)
df = pd.read_csv(csv_file)
del df['Unnamed: 0']
    skr = set(np.random.choice(range(len(df)), size=n, replace=False))  # replace=False guarantees unique indices
df = df[[idx in skr for idx in xrange(len(df))]]
return df
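# Usage sketch (assumes df_save_img_everything.csv sits in the working dir):
#   df = load_mnist_features(1000)  # random, unique sample of 1000 rows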
| alliemacleay/MachineLearning_CS6140 | Homeworks/HW6/__init__.py | Python | mit | 425 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_request(
**kwargs: Any
) -> HttpRequest:
api_version = "2017-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/providers/Microsoft.DBforMySQL/operations')
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class Operations(object):
"""Operations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.rdbms.mysql.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
**kwargs: Any
) -> "_models.OperationListResult":
"""Lists all of the available REST API operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: OperationListResult, or the result of cls(response)
:rtype: ~azure.mgmt.rdbms.mysql.models.OperationListResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_request(
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('OperationListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list.metadata = {'url': '/providers/Microsoft.DBforMySQL/operations'} # type: ignore
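# Hedged usage sketch (construction details are illustrative, not generated
# code): this operation group is normally reached through the management
# client rather than instantiated directly, e.g.
#
#   from azure.identity import DefaultAzureCredential
#   from azure.mgmt.rdbms.mysql import MySQLManagementClient
#
#   client = MySQLManagementClient(DefaultAzureCredential(), "<subscription-id>")
#   result = client.operations.list()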
| Azure/azure-sdk-for-python | sdk/rdbms/azure-mgmt-rdbms/azure/mgmt/rdbms/mysql/operations/_operations.py | Python | mit | 4,391 |
"""Qgram based tokenizer"""
from py_stringmatching import utils
from six.moves import xrange
from py_stringmatching.tokenizer.definition_tokenizer import DefinitionTokenizer
class QgramTokenizer(DefinitionTokenizer):
"""Qgram tokenizer class.
Parameters:
qval (int): Q-gram length (defaults to 2)
return_set (boolean): flag to indicate whether to return a set of
tokens. (defaults to False)
"""
def __init__(self, qval=2, return_set=False):
if qval < 1:
raise AssertionError("qval cannot be less than 1")
self.qval = qval
super(QgramTokenizer, self).__init__(return_set)
def tokenize(self, input_string):
"""
Tokenizes input string into q-grams.
        A q-gram is a contiguous sequence of q characters; tokenizing yields
        all such overlapping sequences in the input string. Q-grams are also
        known as n-grams and k-grams.
Args:
input_string (str): Input string
Returns:
Token list (list)
Raises:
TypeError : If the input is not a string
Examples:
>>> qg2_tok = QgramTokenizer()
>>> qg2_tok.tokenize('database')
['da','at','ta','ab','ba','as','se']
>>> qg2_tok.tokenize('a')
[]
>>> qg3_tok = QgramTokenizer(3)
>>> qg3_tok.tokenize('database')
['dat', 'ata', 'tab', 'aba', 'bas', 'ase']
"""
utils.tok_check_for_none(input_string)
utils.tok_check_for_string_input(input_string)
qgram_list = []
if len(input_string) < self.qval:
return qgram_list
qgram_list = [input_string[i:i + self.qval] for i in
xrange(len(input_string) - (self.qval - 1))]
qgram_list = list(filter(None, qgram_list))
if self.return_set:
return utils.convert_bag_to_set(qgram_list)
return qgram_list
def get_qval(self):
"""
Get Q-gram length
Returns:
Q-gram length (int)
"""
return self.qval
def set_qval(self, qval):
"""
Set Q-gram length
Args:
qval (int): Q-gram length
Raises:
AssertionError : If qval is less than 1
"""
if qval < 1:
raise AssertionError("qval cannot be less than 1")
self.qval = qval
return True
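
if __name__ == '__main__':
    # Hedged demo, not part of the original module: shows how return_set
    # collapses duplicate q-grams into unique tokens (assuming
    # utils.convert_bag_to_set returns the unique tokens).
    bag_tok = QgramTokenizer(qval=2)
    set_tok = QgramTokenizer(qval=2, return_set=True)
    print(bag_tok.tokenize('aaa'))  # ['aa', 'aa']
    print(set_tok.tokenize('aaa'))  # ['aa']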
| Anson-Doan/py_stringmatching | py_stringmatching/tokenizer/qgram_tokenizer.py | Python | bsd-3-clause | 2,448 |
# Natural Language Toolkit: Confusion Matrices
#
# Copyright (C) 2001-2013 NLTK Project
# Author: Edward Loper <edloper@gmail.com>
# Steven Bird <stevenbird1@gmail.com>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
from __future__ import print_function, unicode_literals
from nltk.probability import FreqDist
from nltk.compat import python_2_unicode_compatible
@python_2_unicode_compatible
class ConfusionMatrix(object):
"""
The confusion matrix between a list of reference values and a
corresponding list of test values. Entry *[r,t]* of this
matrix is a count of the number of times that the reference value
*r* corresponds to the test value *t*. E.g.:
>>> from nltk.metrics import ConfusionMatrix
>>> ref = 'DET NN VB DET JJ NN NN IN DET NN'.split()
>>> test = 'DET VB VB DET NN NN NN IN DET NN'.split()
>>> cm = ConfusionMatrix(ref, test)
>>> print(cm['NN', 'NN'])
3
    Note that the diagonal entries *Ri=Tj* of this matrix
    correspond to correct values, and the off-diagonal entries
    correspond to incorrect values.
"""
def __init__(self, reference, test, sort_by_count=False):
"""
Construct a new confusion matrix from a list of reference
values and a corresponding list of test values.
:type reference: list
:param reference: An ordered list of reference values.
:type test: list
:param test: A list of values to compare against the
corresponding reference values.
        :raise ValueError: If ``reference`` and ``test`` do not have
            the same length.
"""
if len(reference) != len(test):
raise ValueError('Lists must have the same length.')
# Get a list of all values.
if sort_by_count:
ref_fdist = FreqDist(reference)
test_fdist = FreqDist(test)
def key(v): return -(ref_fdist[v]+test_fdist[v])
values = sorted(set(reference+test), key=key)
else:
values = sorted(set(reference+test))
# Construct a value->index dictionary
indices = dict((val,i) for (i,val) in enumerate(values))
# Make a confusion matrix table.
confusion = [[0 for val in values] for val in values]
max_conf = 0 # Maximum confusion
for w,g in zip(reference, test):
confusion[indices[w]][indices[g]] += 1
max_conf = max(max_conf, confusion[indices[w]][indices[g]])
#: A list of all values in ``reference`` or ``test``.
self._values = values
#: A dictionary mapping values in ``self._values`` to their indices.
self._indices = indices
#: The confusion matrix itself (as a list of lists of counts).
self._confusion = confusion
#: The greatest count in ``self._confusion`` (used for printing).
self._max_conf = max_conf
#: The total number of values in the confusion matrix.
self._total = len(reference)
#: The number of correct (on-diagonal) values in the matrix.
self._correct = sum(confusion[i][i] for i in range(len(values)))
def __getitem__(self, li_lj_tuple):
"""
:return: The number of times that value ``li`` was expected and
value ``lj`` was given.
:rtype: int
"""
(li, lj) = li_lj_tuple
i = self._indices[li]
j = self._indices[lj]
return self._confusion[i][j]
def __repr__(self):
return '<ConfusionMatrix: %s/%s correct>' % (self._correct,
self._total)
def __str__(self):
return self.pp()
def pp(self, show_percents=False, values_in_chart=True,
truncate=None, sort_by_count=False):
"""
:return: A multi-line string representation of this confusion matrix.
:type truncate: int
:param truncate: If specified, then only show the specified
number of values. Any sorting (e.g., sort_by_count)
will be performed before truncation.
:param sort_by_count: If true, then sort by the count of each
label in the reference data. I.e., labels that occur more
frequently in the reference label will be towards the left
edge of the matrix, and labels that occur less frequently
will be towards the right edge.
@todo: add marginals?
"""
confusion = self._confusion
values = self._values
if sort_by_count:
values = sorted(values, key=lambda v:
-sum(self._confusion[self._indices[v]]))
if truncate:
values = values[:truncate]
if values_in_chart:
value_strings = ["%s" % val for val in values]
else:
value_strings = [str(n+1) for n in range(len(values))]
# Construct a format string for row values
valuelen = max(len(val) for val in value_strings)
value_format = '%' + repr(valuelen) + 's | '
# Construct a format string for matrix entries
if show_percents:
entrylen = 6
entry_format = '%5.1f%%'
zerostr = ' .'
else:
entrylen = len(repr(self._max_conf))
entry_format = '%' + repr(entrylen) + 'd'
zerostr = ' '*(entrylen-1) + '.'
# Write the column values.
s = ''
for i in range(valuelen):
s += (' '*valuelen)+' |'
for val in value_strings:
if i >= valuelen-len(val):
s += val[i-valuelen+len(val)].rjust(entrylen+1)
else:
s += ' '*(entrylen+1)
s += ' |\n'
# Write a dividing line
s += '%s-+-%s+\n' % ('-'*valuelen, '-'*((entrylen+1)*len(values)))
# Write the entries.
for val, li in zip(value_strings, values):
i = self._indices[li]
s += value_format % val
for lj in values:
j = self._indices[lj]
if confusion[i][j] == 0:
s += zerostr
elif show_percents:
s += entry_format % (100.0*confusion[i][j]/self._total)
else:
s += entry_format % confusion[i][j]
if i == j:
prevspace = s.rfind(' ')
s = s[:prevspace] + '<' + s[prevspace+1:] + '>'
else: s += ' '
s += '|\n'
# Write a dividing line
s += '%s-+-%s+\n' % ('-'*valuelen, '-'*((entrylen+1)*len(values)))
# Write a key
s += '(row = reference; col = test)\n'
if not values_in_chart:
s += 'Value key:\n'
for i, value in enumerate(values):
s += '%6d: %s\n' % (i+1, value)
return s
def key(self):
values = self._values
str = 'Value key:\n'
indexlen = len(repr(len(values)-1))
key_format = ' %'+repr(indexlen)+'d: %s\n'
for i in range(len(values)):
str += key_format % (i, values[i])
return str
def demo():
reference = 'DET NN VB DET JJ NN NN IN DET NN'.split()
test = 'DET VB VB DET NN NN NN IN DET NN'.split()
print('Reference =', reference)
print('Test =', test)
print('Confusion matrix:')
print(ConfusionMatrix(reference, test))
print(ConfusionMatrix(reference, test).pp(sort_by_count=True))
if __name__ == '__main__':
demo()
| TeamSPoon/logicmoo_workspace | packs_sys/logicmoo_nlu/ext/pldata/nltk_3.0a3/nltk/metrics/confusionmatrix.py | Python | mit | 7,586 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Pexego Sistemas Informáticos All Rights Reserved
# $Jesús Ventosinos Mayor <jesus@pexego.es>$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _
from openerp.exceptions import except_orm
class purchase_order(models.Model):
_inherit = 'purchase.order'
picking_created = fields.Boolean('Picking created',
compute='is_picking_created')
@api.multi
def test_moves_done(self):
'''PO is done at the delivery side if all the incoming shipments
are done'''
for purchase in self:
for line in purchase.order_line:
for move in line.move_ids:
if move.state != 'done':
return False
return True
def is_picking_created(self):
self.picking_created = self.picking_ids and True or False
def _prepare_order_line_move(self, cr, uid, order, order_line, picking_id,
group_id, context=None):
"""
prepare the stock move data from the PO line.
This function returns a list of dictionary ready to be used in
stock.move's create()
"""
purchase_line_obj = self.pool['purchase.order.line']
res = super(purchase_order, self)._prepare_order_line_move(
cr, uid, order, order_line, picking_id, group_id, context)
for move_dict in res:
move_dict.pop('picking_id', None)
move_dict.pop('product_uos_qty', None)
move_dict.pop('product_uos', None)
move_dict['partner_id'] = order.partner_id.id
if order.partner_ref:
move_dict['origin'] += ":" + order.partner_ref
return res
def action_picking_create(self, cr, uid, ids, context=None):
"""
        Overridden so that the picking is not created.
"""
for order in self.browse(cr, uid, ids):
self._create_stock_moves(cr, uid, order, order.order_line,
False, context=context)
def _create_stock_moves(self, cr, uid, order, order_lines,
picking_id=False, context=None):
"""
        MOD: Overridden so that the stock moves are not confirmed.
"""
stock_move = self.pool.get('stock.move')
todo_moves = []
new_group = self.pool.get("procurement.group").create(
cr, uid, {'name': order.name, 'partner_id': order.partner_id.id},
context=context)
for order_line in order_lines:
if not order_line.product_id:
continue
if order_line.product_id.type in ('product', 'consu'):
for vals in self._prepare_order_line_move(
cr, uid, order, order_line, picking_id, new_group,
context=context):
move = stock_move.create(cr, uid, vals, context=context)
todo_moves.append(move)
def move_lines_create_picking(self, cr, uid, ids, context=None):
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
moves = self.pool('stock.move')
result = mod_obj.get_object_reference(cr, uid, 'stock', 'action_receive_move')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context=context)[0]
self_purchase = self.browse(cr, uid, ids)
move_lines = moves.search(cr, uid,
[('origin', 'like', self_purchase.name + '%'),
('picking_id', '=', False)],
context=context)
if len(move_lines) < 1:
            raise except_orm(_('Warning'), _('There are no move lines without an associated picking'))
result['context'] = []
if len(move_lines) > 1:
result['domain'] = "[('id','in',[" + ','.join(map(str, move_lines)) + "])]"
else:
result['domain'] = "[('id','='," + str(move_lines[0]) + ")]"
return result
class purchase_order_line(models.Model):
_inherit = 'purchase.order.line'
@api.multi
def write(self, vals):
res = super(purchase_order_line, self).write(vals)
for line in self:
if line.move_ids and vals.get('date_planned', False):
for move in line.move_ids:
if move.state not in ['cancel',u'done'] and \
not move.container_id:
move.date_expected = vals['date_planned']
return res
| jgmanzanas/CMNT_004_15 | project-addons/purchase_picking/purchase.py | Python | agpl-3.0 | 5,513 |
# -*- coding: utf-8 -*-
import logging
log = logging.getLogger(__name__)
def init_single_entity(scenario, axes=None, enfants=None, famille=None,
                       foyer_fiscal=None, menage=None, parent1=None,
                       parent2=None, period=None):
if enfants is None:
enfants = []
assert parent1 is not None
familles = {}
foyers_fiscaux = {}
menages = {}
individus = {}
count_so_far = 0
for nth in range(0, 1):
famille_nth = famille.copy() if famille is not None else {}
foyer_fiscal_nth = foyer_fiscal.copy() if foyer_fiscal is not None else {}
menage_nth = menage.copy() if menage is not None else {}
group = [parent1, parent2] + (enfants or [])
for index, individu in enumerate(group):
if individu is None:
continue
id = individu.get('id')
if id is None:
individu = individu.copy()
id = 'ind{}'.format(index + count_so_far)
individus[id] = individu
if index <= 1:
famille_nth.setdefault('parents', []).append(id)
foyer_fiscal_nth.setdefault('declarants', []).append(id)
if index == 0:
menage_nth['personne_de_reference'] = id
else:
menage_nth['conjoint'] = id
else:
famille_nth.setdefault('enfants', []).append(id)
foyer_fiscal_nth.setdefault('personnes_a_charge', []).append(id)
menage_nth.setdefault('enfants', []).append(id)
count_so_far += len(group)
familles["f{}".format(nth)] = famille_nth
foyers_fiscaux["ff{}".format(nth)] = foyer_fiscal_nth
menages["m{}".format(nth)] = menage_nth
test_data = {
'period': period,
'familles': familles,
'foyers_fiscaux': foyers_fiscaux,
'menages': menages,
'individus': individus
}
if axes:
test_data['axes'] = axes
scenario.init_from_dict(test_data)
return scenario
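
if __name__ == '__main__':
    # Hedged demo, not part of the library: a minimal stub stands in for a
    # real scenario object so the generated entity dict can be inspected.
    class _StubScenario(object):
        def init_from_dict(self, data):
            self.data = data

    s = init_single_entity(_StubScenario(), parent1={'age': 40},
                           enfants=[{'age': 9}], period='2015')
    print(s.data['familles'])  # {'f0': {'parents': ['ind0'], 'enfants': ['ind2']}}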
| sgmap/openfisca-france | openfisca_france/scenarios.py | Python | agpl-3.0 | 2,063 |
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.utils.vars import merge_hash
class AggregateStats:
''' holds stats about per-host activity during playbook runs '''
def __init__(self):
self.processed = {}
self.failures = {}
self.ok = {}
self.dark = {}
self.changed = {}
self.skipped = {}
# user defined stats, which can be per host or global
self.custom = {}
def increment(self, what, host):
''' helper function to bump a statistic '''
self.processed[host] = 1
prev = (getattr(self, what)).get(host, 0)
getattr(self, what)[host] = prev+1
def summarize(self, host):
''' return information about a particular host '''
return dict(
ok = self.ok.get(host, 0),
failures = self.failures.get(host, 0),
unreachable = self.dark.get(host,0),
changed = self.changed.get(host, 0),
skipped = self.skipped.get(host, 0)
)
def set_custom_stats(self, which, what, host=None):
''' allow setting of a custom stat'''
if host is None:
host = '_run'
if host not in self.custom:
self.custom[host] = {which: what}
else:
self.custom[host][which] = what
def update_custom_stats(self, which, what, host=None):
''' allow aggregation of a custom stat'''
if host is None:
host = '_run'
if host not in self.custom or which not in self.custom[host]:
return self.set_custom_stats(which, what, host)
# mismatching types
if type(what) != type(self.custom[host][which]):
return None
if isinstance(what, dict):
self.custom[host][which] = merge_hash(self.custom[host][which], what)
else:
# let overloaded + take care of other types
self.custom[host][which] += what
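
if __name__ == '__main__':
    # Hedged demo, not part of Ansible (and requiring Ansible importable):
    # per-host counters plus custom-stat aggregation.
    stats = AggregateStats()
    stats.increment('ok', 'host1')
    stats.increment('changed', 'host1')
    stats.update_custom_stats('retries', 1, 'host1')
    stats.update_custom_stats('retries', 2, 'host1')
    print(stats.summarize('host1'))  # ok=1, changed=1, others 0
    print(stats.custom['host1'])     # {'retries': 3}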
| alvaroaleman/ansible | lib/ansible/executor/stats.py | Python | gpl-3.0 | 2,779 |
import sys
sys.path.insert(0, '..')
import patch_socket
import redis
print(redis.__file__)
try:
import redis._pack
except ImportError:
print('using pure python _pack_command')
else:
print('using cython _pack_command')
from redis.connection import Connection
class DummyConnectionPool(object):
def __init__(self):
self.conn = Connection()
def get_connection(self, *args, **kwargs):
return self.conn
def release(self, conn):
pass
def disconnect(self):
pass
def reinstantiate(self):
pass
pool = DummyConnectionPool()
pool.conn.connect()
rds = redis.StrictRedis(connection_pool=pool)
def bench():
'the operation for benchmark'
rds.set('test', 100)
def run_with_recording(sock, func):
sock.start_record()
func()
def run_with_replay(sock, func):
sock.start_replay()
func()
# record once
run_with_recording(pool.conn._sock, bench)
import timeit
timeit.main(['-s', 'from __main__ import run_with_replay, pool, bench',
'-n', '10000', 'run_with_replay(pool.conn._sock, bench)'])
import cProfile
if sys.version_info[0] >= 3:
xrange = range
cProfile.run('for i in xrange(10000):run_with_replay(pool.conn._sock, bench)',
sort='time')
| yihuang/redis-py | benchmarks/c_command_packer_benchmark.py | Python | mit | 1,265 |
from django.shortcuts import redirect
from social.pipeline.partial import partial
@partial
def require_email(strategy, details, user=None, is_new=False, *args, **kwargs):
if kwargs.get('ajax') or user and user.email:
return
elif is_new and not details.get('email'):
email = strategy.request_data().get('email')
if email:
details['email'] = email
else:
return redirect('require_email')
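
# Hedged configuration sketch (surrounding entries are illustrative): a
# partial step like this is wired into the python-social-auth pipeline in
# settings, with a matching 'require_email' URL/view to collect the address:
#
# SOCIAL_AUTH_PIPELINE = (
#     'social.pipeline.social_auth.social_details',
#     # ...
#     'userprofile.pipeline.require_email',
#     # ...
#     'social.pipeline.user.create_user',
# )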
| aneumeier/userprofile | userprofile/pipeline.py | Python | mit | 452 |
from bson.objectid import ObjectId
import json
class Room():
def __init__(self, players_num, objectid, table, current_color='purple'):
if players_num:
self.players_num = players_num
else:
self.players_num = 0
for el in ['p', 'b', 'g', 'r']:
if el in table:
self.players_num += 1
self.objectid = objectid
self.current_color = current_color
self.players_dict = {}
self.alredy_ex = []
self.colors = []
self.winner = None
for col in ['p', 'b', 'g', 'r']:
if col in table:
self.colors.append(
{'p': 'purple',
'b': 'blue',
'g': 'green',
'r': 'red'}[col])
if current_color in self.colors:
self.current_color = current_color
else:
self.current_color = self.colors[0]
self.users_nicks = {}
self.color_player_dict = {'purple': None, 'blue': None, 'green': None, 'red': None}
self.player_color_dict = {}
self.status = 'waiting'
def get_player_by_color(self, color):
if color in self.color_player_dict:
return self.players_dict[self.color_player_dict[color]]
return None
def get_color_by_player(self, player_id):
if player_id in self.player_color_dict:
return self.player_color_dict[player_id]
return None
def add_player(self, player_id, name):
self.players_dict[player_id] = False
self.users_nicks[player_id] = name
for color in self.colors:
if not self.color_player_dict[color]:
self.color_player_dict[color] = player_id
self.player_color_dict[player_id] = color
break
def dell_player(self, player_id):
self.players_dict[player_id] = False
return self
def change_row(self, row, i, to):
return row[:i] + to + row[i + 1:]
def update_table(self, move, table):
print('Table updating')
pymove = json.loads(move)
pytable = json.loads(table)
print('Old table:')
for row in pytable:
print(' ', row)
x0, y0 = int(pymove['X0']), int(pymove['Y0'])
x1, y1 = int(pymove['X1']), int(pymove['Y1'])
print('Move from ({}, {}) to ({}, {})'.format(x0, y0, x1, y1))
if ((abs(x1 - x0) > 1) or (abs(y1 - y0) > 1)):
pytable[x0] = self.change_row(pytable[x0], y0, 'e')
for i in range(-1, 2):
for j in range(-1, 2):
if (x1 + i < len(pytable)) and (x1 + i > -1):
if (y1 + j < len(pytable[x1])) and (y1 + j > -1):
if pytable[x1 + i][y1 + j] != 'e':
pytable[x1 + i] = self.change_row(pytable[x1 + i], y1 + j, self.current_color[0].lower())
pytable[x1] = self.change_row(pytable[x1], y1, self.current_color[0].lower())
res = json.dumps(pytable)
if 'e' not in res:
r_count = (res.count('r'), 'red')
b_count = (res.count('b'), 'blue')
g_count = (res.count('g'), 'green')
p_count = (res.count('p'), 'purple')
sort_list = [r_count, b_count, p_count, g_count]
sort_list.sort()
self.winner = sort_list[-1][1]
print('New table:')
for row in pytable:
print(' ', row)
return res
def can_move(self, table):
pytable = json.loads(table)
for row_id, row in enumerate(pytable):
for char_id in range(len(row)):
char = row[char_id]
if char == self.current_color[0].lower():
for i in range(-2, 3):
for j in range(-2, 3):
if (row_id + i < len(pytable)) and (row_id + i > -1):
if (char_id + j < len(row)) and (char_id + j > -1):
if pytable[row_id + i][char_id + j] == 'e':
return True
return False
def change_color(self, table):
        print('Color changing')
colors = self.colors
self.current_color = colors[
(colors.index(self.current_color) + 1) % self.players_num]
i = 1
while ((not self.players_dict[self.color_player_dict[self.current_color]]) or (not self.can_move(table))) and (i <= 5):
self.current_color = colors[
(colors.index(self.current_color) + 1) % self.players_num]
i += 1
if not self.can_move(table):
return None
return self.current_color
class RoomsManager():
def __init__(self, db):
# dict of rooms by their obj_id
self.db = db
self.rooms_dict = {}
def get_room(self, objectid):
if objectid not in self.rooms_dict:
rid = objectid
room_in_db = self.db.rooms.find_one({'_id': ObjectId(rid)})
if room_in_db:
print('Room', objectid, 'extrapolated from db')
new_room = Room(
int(room_in_db['players_num']), rid, room_in_db['table'])
new_room.current_color = room_in_db['current_color']
for user_id in room_in_db['players']:
player = room_in_db['players'][user_id]
new_room.color_player_dict[player['color']] = user_id
new_room.player_color_dict[user_id] = player['color']
new_room.users_nicks[user_id] = player['nick']
new_room.players_dict[user_id] = None
self.rooms_dict[rid] = new_room
else:
return None
return self.rooms_dict[objectid]
def add_room(self, room):
self.rooms_dict[room.objectid] = room
def rooms(self):
for objectid in self.rooms_dict:
yield self.rooms_dict[objectid]
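
if __name__ == '__main__':
    # Hedged demo, not part of the original module: exercises Room.update_table
    # on a tiny 3x3 board using the one-letter color codes ('p', 'b', 'e', ...).
    demo_room = Room(players_num=None, objectid='demo', table='pb')
    board = json.dumps(['pee', 'eee', 'eeb'])
    move = json.dumps({'X0': 0, 'Y0': 0, 'X1': 1, 'Y1': 1})
    print(demo_room.update_table(move, board))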
| Andrey-Tkachev/infection | models/room.py | Python | mit | 6,066 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-02-09 09:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('scoping', '0044_auto_20170209_0949'),
]
operations = [
migrations.AlterField(
model_name='doc',
name='UT',
field=models.CharField(db_index=True, max_length=240, primary_key=True, serialize=False),
),
]
| mcallaghan/tmv | BasicBrowser/scoping/migrations/0045_auto_20170209_0950.py | Python | gpl-3.0 | 495 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bitcamp.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| jerrrytan/bitcamp | bitcamp/manage.py | Python | mit | 250 |
# -*- coding: utf-8 -*-
# Asymmetric Base Framework - A collection of utilities for django frameworks
# Copyright (C) 2013 Asymmetric Ventures Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from __future__ import absolute_import, division, print_function, unicode_literals
from asymmetricbase.utils.cached_function import cached_function
__all__ = ('Role', 'AssignedRole', 'RoleTransfer', 'TypeAwareRoleManager', 'DefaultRole', 'OnlyRoleGroupProxy', 'HasTypeAwareRoleManager')
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.contrib.auth.models import Group, GroupManager
from django.core.exceptions import ValidationError, ImproperlyConfigured
from django.conf import settings
from django.db import models
from asymmetricbase.logging import logger
from .base import AsymBaseModel
from asymmetricbase.fields import LongNameField
# UserModel must implement get_groups_query_string().
# This method should return a string that can be used in a queryset
# filter to access the user's groups. For example, the default User class
# would return 'groups', since to filter by groups we would
# do User.objects.filter(groups__in=foo)
UserModel = getattr(settings, 'ASYM_ROLE_USER_MODEL')
usermodel_related_fields = getattr(settings, 'ASYM_ROLE_USER_MODEL_SELECT_RELATED', None)
@cached_function
def get_user_role_model():
''' Returns the model that the roles are attached to '''
try:
app_label, model_name = settings.ASYM_ROLE_USER_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured("ASYM_ROLE_USER_MODEL must be of the form 'app_label.model_name'")
user_model = models.get_model(app_label, model_name)
if user_model is None:
raise ImproperlyConfigured("ASYM_ROLE_USER_MODEL refers to model '{}' that has not been installed".format(settings.ASYM_ROLE_USER_MODEL))
return user_model
class TypeAwareRoleManager(models.Manager):
def __init__(self, model_content_type, *args, **kwargs):
super(TypeAwareRoleManager, self).__init__(*args, **kwargs)
self.model_content_type = model_content_type
def get_queryset(self):
return Role.objects.get_queryset() \
.filter(defined_for = self.model_content_type)
get_query_set = get_queryset
def HasTypeAwareRoleManager(content_type_model_name):
""" If a model has a TypeAwareRoleManager (TARM) as a field, and its baseclass defines "objects",
then the TARM will override this as the default manager. In some cases this is not what is wanted,
if, for example, the primary key of the model is a CharField. In this case, the TARM causes an insert
to fail as it uses AutoField() instead of CharField() causing it to issue an int() on the field.
"""
attrs = {
'assigned_roles' : generic.GenericRelation(AssignedRole),
'roles' : property(lambda self: TypeAwareRoleManager(model_content_type = ContentType.objects.filter(model = content_type_model_name)))
}
return type(str('{}TypeAwareRoleManager'.format(content_type_model_name.title())), (object,), attrs)
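
# Hedged usage sketch (model name is illustrative): mixing the generated class
# into a concrete model exposes `instance.roles`, a manager limited to Roles
# whose content type matches the given model name.
#
# class Project(AsymBaseModel, HasTypeAwareRoleManager('project')):
#     name = LongNameField()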
class RoleGroupProxyManager(GroupManager):
"""
Manager that returns only the Group objects that either ARE or ARE NOT a permission_group
for a Role object, depending on the parameter given at instantiation.
"""
def __init__(self, role_is_null, *args, **kwargs):
self.role_is_null = role_is_null
super(RoleGroupProxyManager, self).__init__(*args, **kwargs)
def get_queryset(self):
return Group.objects.filter(role__isnull = self.role_is_null)
get_query_set = get_queryset
class NotRoleGroupProxy(Group):
"""
Proxy model to provide only those Group objects who are NOT a permission_group
on a Role object.
"""
class Meta(object):
proxy = True
objects = RoleGroupProxyManager(role_is_null = True)
class OnlyRoleGroupProxy(Group):
"""
Proxy model to provide only those Group objects who ARE a permission_group
on a Role object.
"""
class Meta(object):
proxy = True
objects = RoleGroupProxyManager(role_is_null = False)
class Role(AsymBaseModel):
"""
A Role is defined for a model class, and specifies a set of permissions,
and a set of permitted groups, one of which a user must be in to be assigned this
Role.
Roles can be assigned to a User on a single object instance via AssignedRole.
"""
class Meta(object):
unique_together = (
('name', 'defined_for',),
)
app_label = 'shared'
name = LongNameField()
defined_for = models.ForeignKey(ContentType)
# limit permitted groups to those groups that are not permission_groups for other Roles
permitted_groups = models.ManyToManyField(Group, limit_choices_to = {'role__isnull' : True}, related_name = 'possible_roles')
permission_group = models.OneToOneField(Group)
def __str__(self):
return "{} defined on {}".format(self.name, self.defined_for.model_class().__name__)
@property
def permitted_users(self):
# looking at the .query for this QuerySet shows that you can, in fact,
# do a .filter(set_of_things__in = another_set_of_things)
user_role_model = get_user_role_model()
queryset = user_role_model.objects.filter(**{user_role_model.get_groups_query_string() + '__id__in': self.permitted_groups.all()}).distinct()
if usermodel_related_fields is not None:
return queryset.select_related(*usermodel_related_fields)
else:
return queryset
# the above is equivalent to:
# users = set()
# for g in self.permitted_groups.all():
# for u in g.user_set.all():
# users.add(u)
# return users
class AssignedRole(AsymBaseModel):
"""
For adding Users in specific roles to models.
"""
user = models.ForeignKey(UserModel, related_name = 'assigned_roles')
role = models.ForeignKey(Role, related_name = 'assignments')
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey()
def __str__(self):
return "Role '{}' on '{}'".format(self.role, self.content_type)
def __copy__(self):
return AssignedRole(
user = self.user,
role = self.role,
content_object = self.content_object,
)
def save(self, *args, **kwargs):
"""
Check that role is defined on the content_type and user in permitted_groups.
"""
if self.role not in Role.objects.filter(defined_for = self.content_type):
raise ValidationError("'{}' is not defined on '{}'".format(self.role, self.content_type))
if self.user not in self.role.permitted_users:
raise ValidationError("'{}' is not permitted to be assigned to '{}'".format(self.user, self.role))
super(AssignedRole, self).save(*args, **kwargs)
class RoleTransfer(AsymBaseModel):
role_from = models.ForeignKey(Role, related_name = '+')
role_to = models.ForeignKey(Role, related_name = '+')
def __html__(self):
return "Transfer of {from.defined_for}.{from.name} to {to.defined_for}.{to.name}".format(**{'from' : self.role_from, 'to' : self.role_to})
def __str__(self):
return self.__html__()
@classmethod
def create(cls, from_model, to_model):
"""
`from_model` and `to_model` need to be instances of models.Model
"""
if type(from_model) == type(to_model):
logger.info("Tried to create a role transfer on identical models {} and {}".format(from_model, to_model))
return
new_assigned_roles = []
# All the transfers that could be made
possible_transfers = cls.objects.filter(role_from__defined_for = from_model.get_content_type(), role_to__defined_for = to_model.get_content_type())
for transfer in possible_transfers:
# See if there are any assigned roles on the from_model
assigned = AssignedRole.objects.filter(role = transfer.role_from, content_type = from_model.get_content_type(), object_id = from_model.id)
# Now copy the role to the to_model
for assigned_role in assigned:
new_assigned_role, created = AssignedRole.objects.get_or_create(
user = assigned_role.user,
role = transfer.role_to,
object_id = to_model.id,
content_type = to_model.get_content_type()
)
logger.info("Created ({}) assigned role {}/{} for user {}".format(created, new_assigned_role, transfer, assigned_role.user))
new_assigned_roles.append(new_assigned_role)
return new_assigned_roles
@classmethod
def check_groups(cls, from_role, to_role):
"""
Check that all permitted groups in from_role are also present on to_role.
Performing the transfer will fail if the groups are not present.
Return a list of messages or None if no errors.
"""
msg_list = []
for from_group in from_role.permitted_groups.all():
if from_group not in to_role.permitted_groups.all():
msg_list.append("""
The {group} group is not a Permitted Group on the {role} role defined on {to_model}. The role transfer could fail if created.
""".format(group = from_group.name, role = to_role.name, to_model = str(to_role.defined_for).title()))
return msg_list if len(msg_list) > 0 else None
class DefaultRole(AsymBaseModel):
"""
Couple a static Role name defined in settings with a Role object.
This allows renaming of the Roles while still being able to access it
by name in the code.
"""
identifier = models.IntegerField(unique = True)
role = models.ForeignKey(Role)
| AsymmetricVentures/asym-core | asymmetricbase/_models/roles.py | Python | gpl-2.0 | 9,821 |
import os
import zipfile
import requests
import pandas as pd
WALKING_DATASET = (
"https://archive.ics.uci.edu/ml/machine-learning-databases/00286/User%20Identification%20From%20Walking%20Activity.zip",
)
def download_data(path='data', urls=WALKING_DATASET):
if not os.path.exists(path):
os.mkdir(path)
for url in urls:
response = requests.get(url)
name = os.path.basename(url)
with open(os.path.join(path, name), 'wb') as f:
f.write(response.content)
if __name__ == "__main__":
download_data()
z = zipfile.ZipFile(os.path.join('data', 'User%20Identification%20From%20Walking%20Activity.zip'))
z.extractall(os.path.join('data', 'walking'))
# Concatenate all the separate files into a single file
PATH = os.path.join('data', 'walking','User Identification From Walking Activity')
columns = ["timestep", "x acceleration", "y acceleration", "z acceleration"]
allwalkers = pd.DataFrame(columns=columns)
for root, dirs, files in os.walk(PATH):
for file in files:
if file.endswith(".csv"):
walker = pd.read_csv(os.path.join(PATH, file), header=None)
walker.columns = columns
walker["walker_id"] = int(os.path.splitext(file)[0])
allwalkers = pd.concat([allwalkers, walker])
allwalkers.to_csv(os.path.join(PATH, "all_walkers.csv"), index=False)
| rebeccabilbro/rebeccabilbro.github.io | _drafts/get_walking_data.py | Python | mit | 1,420 |
import datetime
from unittest import mock
from django.db import IntegrityError, connection, transaction
from django.db.models import CheckConstraint, F, Func, Q
from django.utils import timezone
from . import PostgreSQLTestCase
from .models import HotelReservation, RangesModel, Room
try:
from django.contrib.postgres.constraints import ExclusionConstraint
from django.contrib.postgres.fields import DateTimeRangeField, RangeBoundary, RangeOperators
from psycopg2.extras import DateRange, NumericRange
except ImportError:
pass
class SchemaTests(PostgreSQLTestCase):
def get_constraints(self, table):
"""Get the constraints on the table using a new cursor."""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_check_constraint_range_value(self):
constraint_name = 'ints_between'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = CheckConstraint(
check=Q(ints__contained_by=NumericRange(10, 30)),
name=constraint_name,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(20, 50))
RangesModel.objects.create(ints=(10, 30))
def test_check_constraint_daterange_contains(self):
constraint_name = 'dates_contains'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = CheckConstraint(
check=Q(dates__contains=F('dates_inner')),
name=constraint_name,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
date_1 = datetime.date(2016, 1, 1)
date_2 = datetime.date(2016, 1, 4)
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(
dates=(date_1, date_2),
dates_inner=(date_1, date_2.replace(day=5)),
)
RangesModel.objects.create(
dates=(date_1, date_2),
dates_inner=(date_1, date_2),
)
def test_check_constraint_datetimerange_contains(self):
constraint_name = 'timestamps_contains'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = CheckConstraint(
check=Q(timestamps__contains=F('timestamps_inner')),
name=constraint_name,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
datetime_1 = datetime.datetime(2016, 1, 1)
datetime_2 = datetime.datetime(2016, 1, 2, 12)
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(
timestamps=(datetime_1, datetime_2),
timestamps_inner=(datetime_1, datetime_2.replace(hour=13)),
)
RangesModel.objects.create(
timestamps=(datetime_1, datetime_2),
timestamps_inner=(datetime_1, datetime_2),
)
class ExclusionConstraintTests(PostgreSQLTestCase):
def get_constraints(self, table):
"""Get the constraints on the table using a new cursor."""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_invalid_condition(self):
msg = 'ExclusionConstraint.condition must be a Q instance.'
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type='GIST',
name='exclude_invalid_condition',
expressions=[(F('datespan'), RangeOperators.OVERLAPS)],
condition=F('invalid'),
)
def test_invalid_index_type(self):
msg = 'Exclusion constraints only support GiST or SP-GiST indexes.'
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type='gin',
name='exclude_invalid_index_type',
expressions=[(F('datespan'), RangeOperators.OVERLAPS)],
)
def test_invalid_expressions(self):
msg = 'The expressions must be a list of 2-tuples.'
for expressions in (['foo'], [('foo')], [('foo_1', 'foo_2', 'foo_3')]):
with self.subTest(expressions), self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type='GIST',
name='exclude_invalid_expressions',
expressions=expressions,
)
def test_empty_expressions(self):
msg = 'At least one expression is required to define an exclusion constraint.'
for empty_expressions in (None, []):
with self.subTest(empty_expressions), self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type='GIST',
name='exclude_empty_expressions',
expressions=empty_expressions,
)
def test_repr(self):
constraint = ExclusionConstraint(
name='exclude_overlapping',
expressions=[
(F('datespan'), RangeOperators.OVERLAPS),
(F('room'), RangeOperators.EQUAL),
],
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type=GIST, expressions=["
"(F(datespan), '&&'), (F(room), '=')]>",
)
constraint = ExclusionConstraint(
name='exclude_overlapping',
expressions=[(F('datespan'), RangeOperators.ADJACENT_TO)],
condition=Q(cancelled=False),
index_type='SPGiST',
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type=SPGiST, expressions=["
"(F(datespan), '-|-')], condition=(AND: ('cancelled', False))>",
)
def test_eq(self):
constraint_1 = ExclusionConstraint(
name='exclude_overlapping',
expressions=[
(F('datespan'), RangeOperators.OVERLAPS),
(F('room'), RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
constraint_2 = ExclusionConstraint(
name='exclude_overlapping',
expressions=[
('datespan', RangeOperators.OVERLAPS),
('room', RangeOperators.EQUAL),
],
)
constraint_3 = ExclusionConstraint(
name='exclude_overlapping',
expressions=[('datespan', RangeOperators.OVERLAPS)],
condition=Q(cancelled=False),
)
self.assertEqual(constraint_1, constraint_1)
self.assertEqual(constraint_1, mock.ANY)
self.assertNotEqual(constraint_1, constraint_2)
self.assertNotEqual(constraint_1, constraint_3)
self.assertNotEqual(constraint_2, constraint_3)
self.assertNotEqual(constraint_1, object())
def test_deconstruct(self):
constraint = ExclusionConstraint(
name='exclude_overlapping',
expressions=[('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint')
self.assertEqual(args, ())
self.assertEqual(kwargs, {
'name': 'exclude_overlapping',
'expressions': [('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)],
})
def test_deconstruct_index_type(self):
constraint = ExclusionConstraint(
name='exclude_overlapping',
index_type='SPGIST',
expressions=[('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint')
self.assertEqual(args, ())
self.assertEqual(kwargs, {
'name': 'exclude_overlapping',
'index_type': 'SPGIST',
'expressions': [('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)],
})
def test_deconstruct_condition(self):
constraint = ExclusionConstraint(
name='exclude_overlapping',
expressions=[('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)],
condition=Q(cancelled=False),
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint')
self.assertEqual(args, ())
self.assertEqual(kwargs, {
'name': 'exclude_overlapping',
'expressions': [('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)],
'condition': Q(cancelled=False),
})
def _test_range_overlaps(self, constraint):
# Create exclusion constraint.
self.assertNotIn(constraint.name, self.get_constraints(HotelReservation._meta.db_table))
with connection.schema_editor() as editor:
editor.add_constraint(HotelReservation, constraint)
self.assertIn(constraint.name, self.get_constraints(HotelReservation._meta.db_table))
# Add initial reservations.
room101 = Room.objects.create(number=101)
room102 = Room.objects.create(number=102)
datetimes = [
timezone.datetime(2018, 6, 20),
timezone.datetime(2018, 6, 24),
timezone.datetime(2018, 6, 26),
timezone.datetime(2018, 6, 28),
timezone.datetime(2018, 6, 29),
]
HotelReservation.objects.create(
datespan=DateRange(datetimes[0].date(), datetimes[1].date()),
start=datetimes[0],
end=datetimes[1],
room=room102,
)
HotelReservation.objects.create(
datespan=DateRange(datetimes[1].date(), datetimes[3].date()),
start=datetimes[1],
end=datetimes[3],
room=room102,
)
# Overlap dates.
with self.assertRaises(IntegrityError), transaction.atomic():
reservation = HotelReservation(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
)
reservation.save()
# Valid range.
HotelReservation.objects.bulk_create([
# Other room.
HotelReservation(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room101,
),
# Cancelled reservation.
HotelReservation(
datespan=(datetimes[1].date(), datetimes[1].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
cancelled=True,
),
# Other adjacent dates.
HotelReservation(
datespan=(datetimes[3].date(), datetimes[4].date()),
start=datetimes[3],
end=datetimes[4],
room=room102,
),
])
def test_range_overlaps_custom(self):
class TsTzRange(Func):
function = 'TSTZRANGE'
output_field = DateTimeRangeField()
constraint = ExclusionConstraint(
name='exclude_overlapping_reservations_custom',
expressions=[
(TsTzRange('start', 'end', RangeBoundary()), RangeOperators.OVERLAPS),
('room', RangeOperators.EQUAL)
],
condition=Q(cancelled=False),
)
self._test_range_overlaps(constraint)
def test_range_overlaps(self):
constraint = ExclusionConstraint(
name='exclude_overlapping_reservations',
expressions=[
(F('datespan'), RangeOperators.OVERLAPS),
('room', RangeOperators.EQUAL)
],
condition=Q(cancelled=False),
)
self._test_range_overlaps(constraint)
def test_range_adjacent(self):
constraint_name = 'ints_adjacent'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[('ints', RangeOperators.ADJACENT_TO)],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
| kaedroho/django | tests/postgres_tests/test_constraints.py | Python | bsd-3-clause | 13,556 |
# -*- encoding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import attr
from attr import ib as Field
from addonpayments.api.common.requests import ApiRequest
from addonpayments.api.elements import Card, DccInfoWithRateType, DccInfoWithAmount
from addonpayments.api.mixins import FieldsAmountMixin, FieldsCommentMixin
from addonpayments.api.payment.requests import AuthRequest
@attr.s
class DccRate(FieldsAmountMixin, FieldsCommentMixin, ApiRequest):
"""
    Class representing a DCC rate request to be sent to the API.
"""
card = Field(default=None, validator=attr.validators.instance_of(Card))
dccinfo = Field(default=None, validator=attr.validators.instance_of(DccInfoWithRateType))
object_fields = ['card', 'dccinfo']
request_type = 'dccrate'
def get_hash_values(self):
"""
Override function to get necessary hash values for this request
:return: list
"""
return [self.timestamp, self.merchantid, self.orderid, self.amount, self.currency, self.card.number]
@attr.s
class AuthRequestWithDccInfo(AuthRequest):
"""
    Class representing an authorisation request with DCC info to be sent to the API.
"""
dccinfo = Field(default=None, validator=attr.validators.instance_of(DccInfoWithAmount))
object_fields = ['card', 'dccinfo']
| ComerciaGP/addonpayments-Python-SDK | addonpayments/api/dcc/requests.py | Python | mit | 1,336 |
import os
import sys
import transaction
from sqlalchemy import engine_from_config
from pyramid.paster import (
get_appsettings,
setup_logging,
)
from pyramid.scripts.common import parse_vars
from ..models import (
DBSession,
MyModel,
FocusModel,
Base,
)
def usage(argv):
cmd = os.path.basename(argv[0])
print('usage: %s <config_uri> [var=value]\n'
'(example: "%s development.ini")' % (cmd, cmd))
sys.exit(1)
def main(argv=sys.argv):
if len(argv) < 2:
usage(argv)
config_uri = argv[1]
options = parse_vars(argv[2:])
setup_logging(config_uri)
settings = get_appsettings(config_uri, options=options)
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
with transaction.manager:
model = MyModel(name='one', value=1)
fmodel = FocusModel(focus=10, productivity=10, motivation=10, energy=9)
DBSession.add(model)
DBSession.add(fmodel)
| xydinesh/focus | focus/scripts/initializedb.py | Python | apache-2.0 | 1,032 |
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from library.preprocessing import ZCA
def transform(input, transform_method='StandardScaler'):
    # Fit and apply the requested scaler; unrecognized or empty method names
    # return the input unchanged.
    if transform_method == 'StandardScaler':
        ss = StandardScaler()
    elif transform_method == 'MinMaxScaler':
        ss = MinMaxScaler()
    else:
        return input
    return ss.fit_transform(input)
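
if __name__ == '__main__':
    # Hedged demo, not part of the original module: scales a toy matrix with
    # both supported methods.
    import numpy as np
    data = np.array([[1.0, 2.0], [3.0, 4.0]])
    print(transform(data, 'StandardScaler'))  # zero mean, unit variance per column
    print(transform(data, 'MinMaxScaler'))    # columns rescaled to [0, 1]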
| sonapraneeth-a/object-classification | library/preprocessing/data_transform.py | Python | mit | 426 |
# Copyright 2016 Hewlett Packard Enterprise Development LP.
# Copyright 2016 Universidade Federal de Campina Grande
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from futurist import periodics
from ironic_lib import metrics_utils
from oslo_log import log as logging
import retrying
import six
from ironic.common import exception
from ironic.common.i18n import _, _LE, _LI, _LW
from ironic.common import states
from ironic.conductor import utils as manager_utils
from ironic.conf import CONF
from ironic.drivers.modules import agent
from ironic.drivers.modules import agent_base_vendor
from ironic.drivers.modules import deploy_utils as ironic_deploy_utils
from ironic.drivers.modules import iscsi_deploy
from ironic.drivers.modules.oneview import common
from ironic.drivers.modules.oneview import deploy_utils
from ironic import objects
LOG = logging.getLogger(__name__)
METRICS = metrics_utils.get_metrics_logger(__name__)
@six.add_metaclass(abc.ABCMeta)
class OneViewPeriodicTasks(object):
@abc.abstractproperty
def oneview_driver(self):
pass
@periodics.periodic(spacing=CONF.oneview.periodic_check_interval,
enabled=CONF.oneview.enable_periodic_tasks)
def _periodic_check_nodes_taken_by_oneview(self, manager, context):
"""Checks if nodes in Ironic were taken by OneView users.
This driver periodic task will check for nodes that were taken by
OneView users while the node is in available state, set the node to
maintenance mode with an appropriate maintenance reason message and
move the node to manageable state.
:param manager: a ConductorManager instance
:param context: request context
:returns: None.
"""
filters = {
'provision_state': states.AVAILABLE,
'maintenance': False,
'driver': self.oneview_driver
}
node_iter = manager.iter_nodes(filters=filters)
for node_uuid, driver in node_iter:
node = objects.Node.get(context, node_uuid)
try:
oneview_using = deploy_utils.is_node_in_use_by_oneview(
self.oneview_client, node
)
except exception.OneViewError as e:
# NOTE(xavierr): Skip this node and process the
# remaining nodes. This node will be checked in
# the next periodic call.
LOG.error(_LE("Error while determining if node "
"%(node_uuid)s is in use by OneView. "
"Error: %(error)s"),
{'node_uuid': node.uuid, 'error': e})
continue
if oneview_using:
                LOG.info(_LI('Updating node %(node_uuid)s in use '
                             'by OneView from %(provision_state)s state '
                             'to %(target_state)s state and maintenance '
                             'mode %(maintenance)s.'),
                         {'node_uuid': node_uuid,
                          'provision_state': states.AVAILABLE,
                          'target_state': states.MANAGEABLE,
                          'maintenance': True})
node.maintenance = True
node.maintenance_reason = common.NODE_IN_USE_BY_ONEVIEW
manager.update_node(context, node)
manager.do_provisioning_action(context, node.uuid, 'manage')
@periodics.periodic(spacing=CONF.oneview.periodic_check_interval,
enabled=CONF.oneview.enable_periodic_tasks)
def _periodic_check_nodes_freed_by_oneview(self, manager, context):
"""Checks if nodes taken by OneView users were freed.
        This driver periodic task is responsible for polling the nodes that
        are in maintenance mode and in manageable state to check if the Server
        Profile was removed, indicating that the node was freed by the OneView
        user. If so, it'll provide the node, which will pass through the
        cleaning process and become available to be provisioned.
:param manager: a ConductorManager instance
:param context: request context
:returns: None.
"""
filters = {
'provision_state': states.MANAGEABLE,
'maintenance': True,
'driver': self.oneview_driver
}
node_iter = manager.iter_nodes(fields=['maintenance_reason'],
filters=filters)
for node_uuid, driver, maintenance_reason in node_iter:
if maintenance_reason == common.NODE_IN_USE_BY_ONEVIEW:
node = objects.Node.get(context, node_uuid)
try:
oneview_using = deploy_utils.is_node_in_use_by_oneview(
self.oneview_client, node
)
except exception.OneViewError as e:
# NOTE(xavierr): Skip this node and process the
# remaining nodes. This node will be checked in
# the next periodic call.
LOG.error(_LE("Error while determining if node "
"%(node_uuid)s is in use by OneView. "
"Error: %(error)s"),
{'node_uuid': node.uuid, 'error': e})
continue
if not oneview_using:
                    LOG.info(_LI('Bringing node %(node_uuid)s back from '
                                 'use by OneView from %(provision_state)s '
                                 'state to %(target_state)s state and '
                                 'maintenance mode %(maintenance)s.'),
                             {'node_uuid': node_uuid,
                              'provision_state': states.MANAGEABLE,
                              'target_state': states.AVAILABLE,
                              'maintenance': False})
node.maintenance = False
node.maintenance_reason = None
manager.update_node(context, node)
manager.do_provisioning_action(
context, node.uuid, 'provide'
)
@periodics.periodic(spacing=CONF.oneview.periodic_check_interval,
enabled=CONF.oneview.enable_periodic_tasks)
def _periodic_check_nodes_taken_on_cleanfail(self, manager, context):
"""Checks failed deploys due to Oneview users taking Server Hardware.
        This last driver periodic task will take care of nodes that would be
        caught in a race condition between OneView and a deploy by Ironic. In
        such cases, the validation fails, moving the node to deploy failed
        and, afterwards, to clean failed state.
This task will set the node to maintenance mode with a proper reason
message and move it to manageable state, from where the second task
can rescue the node as soon as the Server Profile is removed.
:param manager: a ConductorManager instance
:param context: request context
:returns: None.
"""
filters = {
'provision_state': states.CLEANFAIL,
'driver': self.oneview_driver
}
node_iter = manager.iter_nodes(fields=['driver_internal_info'],
filters=filters)
for node_uuid, driver, driver_internal_info in node_iter:
node_oneview_error = driver_internal_info.get('oneview_error')
if node_oneview_error == common.SERVER_HARDWARE_ALLOCATION_ERROR:
node = objects.Node.get(context, node_uuid)
                LOG.info(_LI('Bringing node %(node_uuid)s back from use '
                             'by OneView from %(provision_state)s state '
                             'to %(target_state)s state and '
                             'maintenance mode %(maintenance)s.'),
                         {'node_uuid': node_uuid,
                          'provision_state': states.CLEANFAIL,
                          'target_state': states.MANAGEABLE,
                          'maintenance': True})
node.maintenance = True
node.maintenance_reason = common.NODE_IN_USE_BY_ONEVIEW
driver_internal_info = node.driver_internal_info
driver_internal_info.pop('oneview_error', None)
node.driver_internal_info = driver_internal_info
manager.update_node(context, node)
manager.do_provisioning_action(context, node.uuid, 'manage')
class OneViewIscsiDeploy(iscsi_deploy.ISCSIDeploy, OneViewPeriodicTasks):
"""Class for OneView ISCSI deployment driver."""
oneview_driver = common.ISCSI_PXE_ONEVIEW
def __init__(self):
super(OneViewIscsiDeploy, self).__init__()
self.oneview_client = common.get_oneview_client()
def get_properties(self):
return deploy_utils.get_properties()
@METRICS.timer('OneViewIscsiDeploy.validate')
def validate(self, task):
common.verify_node_info(task.node)
try:
common.validate_oneview_resources_compatibility(
self.oneview_client, task)
except exception.OneViewError as oneview_exc:
raise exception.InvalidParameterValue(oneview_exc)
super(OneViewIscsiDeploy, self).validate(task)
@METRICS.timer('OneViewIscsiDeploy.prepare')
def prepare(self, task):
if common.is_dynamic_allocation_enabled(task.node):
deploy_utils.prepare(self.oneview_client, task)
super(OneViewIscsiDeploy, self).prepare(task)
@METRICS.timer('OneViewIscsiDeploy.tear_down')
def tear_down(self, task):
if (common.is_dynamic_allocation_enabled(task.node) and
not CONF.conductor.automated_clean):
deploy_utils.tear_down(self.oneview_client, task)
return super(OneViewIscsiDeploy, self).tear_down(task)
@METRICS.timer('OneViewIscsiDeploy.prepare_cleaning')
def prepare_cleaning(self, task):
if common.is_dynamic_allocation_enabled(task.node):
deploy_utils.prepare_cleaning(self.oneview_client, task)
return super(OneViewIscsiDeploy, self).prepare_cleaning(task)
@METRICS.timer('OneViewIscsiDeploy.tear_down_cleaning')
def tear_down_cleaning(self, task):
if common.is_dynamic_allocation_enabled(task.node):
deploy_utils.tear_down_cleaning(self.oneview_client, task)
super(OneViewIscsiDeploy, self).tear_down_cleaning(task)
# NOTE (thiagop): We overwrite this interface because we cannot change the boot
# device of OneView managed blades while they are still powered on. We moved
# the call of node_set_boot_device from reboot_to_instance to
# reboot_and_finish_deploy and changed the behavior to shutdown the node before
# doing it.
# TODO(thiagop): remove this interface once bug/1503855 is fixed
class OneViewAgentDeployMixin(object):
@METRICS.timer('OneViewAgentDeployMixin.reboot_to_instance')
def reboot_to_instance(self, task, **kwargs):
task.process_event('resume')
node = task.node
error = self.check_deploy_success(node)
if error is not None:
# TODO(jimrollenhagen) power off if using neutron dhcp to
# align with pxe driver?
msg = (_('node %(node)s command status errored: %(error)s') %
{'node': node.uuid, 'error': error})
LOG.error(msg)
ironic_deploy_utils.set_failed_state(task, msg)
return
LOG.info(_LI('Image successfully written to node %s'), node.uuid)
LOG.debug('Rebooting node %s to instance', node.uuid)
self.reboot_and_finish_deploy(task)
        # NOTE(TheJulia): If we deployed a whole disk image, clean up
        # the tftp files on disk in case the node is disregarding the
        # boot preference.
# TODO(rameshg87): Not all in-tree drivers using reboot_to_instance
# have a boot interface. So include a check for now. Remove this
# check once all in-tree drivers have a boot interface.
if task.driver.boot:
task.driver.boot.clean_up_ramdisk(task)
@METRICS.timer('OneViewAgentDeployMixin.reboot_and_finish_deploy')
def reboot_and_finish_deploy(self, task):
"""Helper method to trigger reboot on the node and finish deploy.
This method initiates a reboot on the node. On success, it
marks the deploy as complete. On failure, it logs the error
and marks deploy as failure.
:param task: a TaskManager object containing the node
:raises: InstanceDeployFailure, if node reboot failed.
"""
wait = CONF.agent.post_deploy_get_power_state_retry_interval * 1000
attempts = CONF.agent.post_deploy_get_power_state_retries + 1
@retrying.retry(
stop_max_attempt_number=attempts,
retry_on_result=lambda state: state != states.POWER_OFF,
wait_fixed=wait
)
def _wait_until_powered_off(task):
return task.driver.power.get_power_state(task)
node = task.node
try:
try:
self._client.power_off(node)
_wait_until_powered_off(task)
except Exception as e:
LOG.warning(
_LW('Failed to soft power off node %(node_uuid)s '
'in at least %(timeout)d seconds. Error: %(error)s'),
{'node_uuid': node.uuid,
'timeout': (wait * (attempts - 1)) / 1000,
'error': e})
manager_utils.node_power_action(task, states.POWER_OFF)
manager_utils.node_set_boot_device(task, 'disk',
persistent=True)
manager_utils.node_power_action(task, states.POWER_ON)
except Exception as e:
msg = (_('Error rebooting node %(node)s after deploy. '
'Error: %(error)s') %
{'node': node.uuid, 'error': e})
agent_base_vendor.log_and_raise_deployment_error(task, msg)
task.process_event('done')
LOG.info(_LI('Deployment to node %s done'), task.node.uuid)
class OneViewAgentDeploy(OneViewAgentDeployMixin, agent.AgentDeploy,
OneViewPeriodicTasks):
"""Class for OneView Agent deployment driver."""
oneview_driver = common.AGENT_PXE_ONEVIEW
def __init__(self):
super(OneViewAgentDeploy, self).__init__()
self.oneview_client = common.get_oneview_client()
def get_properties(self):
return deploy_utils.get_properties()
@METRICS.timer('OneViewAgentDeploy.validate')
def validate(self, task):
common.verify_node_info(task.node)
try:
common.validate_oneview_resources_compatibility(
self.oneview_client, task)
except exception.OneViewError as oneview_exc:
raise exception.InvalidParameterValue(oneview_exc)
super(OneViewAgentDeploy, self).validate(task)
@METRICS.timer('OneViewAgentDeploy.prepare')
def prepare(self, task):
if common.is_dynamic_allocation_enabled(task.node):
deploy_utils.prepare(self.oneview_client, task)
super(OneViewAgentDeploy, self).prepare(task)
@METRICS.timer('OneViewAgentDeploy.tear_down')
def tear_down(self, task):
if (common.is_dynamic_allocation_enabled(task.node) and
not CONF.conductor.automated_clean):
deploy_utils.tear_down(self.oneview_client, task)
return super(OneViewAgentDeploy, self).tear_down(task)
@METRICS.timer('OneViewAgentDeploy.prepare_cleaning')
def prepare_cleaning(self, task):
if common.is_dynamic_allocation_enabled(task.node):
deploy_utils.prepare_cleaning(self.oneview_client, task)
return super(OneViewAgentDeploy, self).prepare_cleaning(task)
@METRICS.timer('OneViewAgentDeploy.tear_down_cleaning')
def tear_down_cleaning(self, task):
if common.is_dynamic_allocation_enabled(task.node):
deploy_utils.tear_down_cleaning(self.oneview_client, task)
super(OneViewAgentDeploy, self).tear_down_cleaning(task)
| NaohiroTamura/ironic | ironic/drivers/modules/oneview/deploy.py | Python | apache-2.0 | 17,139 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C): 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Requirements
# - pyvmomi >= 6.0.0.2016.4
# TODO:
# * more jq examples
# * optional folder hierarchy
"""
$ jq '._meta.hostvars[].config' data.json | head
{
"alternateguestname": "",
"instanceuuid": "5035a5cd-b8e8-d717-e133-2d383eb0d675",
"memoryhotaddenabled": false,
"guestfullname": "Red Hat Enterprise Linux 7 (64-bit)",
"changeversion": "2016-05-16T18:43:14.977925Z",
"uuid": "4235fc97-5ddb-7a17-193b-9a3ac97dc7b4",
"cpuhotremoveenabled": false,
"vpmcenabled": false,
"firmware": "bios",
"""
from __future__ import print_function
import atexit
import datetime
import itertools
import json
import os
import re
import ssl
import sys
import uuid
from time import time
import six
from jinja2 import Environment
from six import integer_types, string_types
from six.moves import configparser
try:
import argparse
except ImportError:
    sys.exit('Error: This inventory script requires the "argparse" python module. Please install it or upgrade to python-2.7')
try:
from pyVmomi import vim, vmodl
from pyVim.connect import SmartConnect, Disconnect
except ImportError:
sys.exit("ERROR: This inventory script required 'pyVmomi' Python module, it was not able to load it")
def regex_match(s, pattern):
'''Custom filter for regex matching'''
reg = re.compile(pattern)
if reg.match(s):
return True
else:
return False
def select_chain_match(inlist, key, pattern):
'''Get a key from a list of dicts, squash values to a single list, then filter'''
outlist = [x[key] for x in inlist]
outlist = list(itertools.chain(*outlist))
outlist = [x for x in outlist if regex_match(x, pattern)]
return outlist
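# Example with hypothetical data: for inlist = [{'net': ['eth0', 'eth1']},
# {'net': ['eth2']}], select_chain_match(inlist, 'net', 'eth[01]') squashes
# the 'net' values to ['eth0', 'eth1', 'eth2'] and returns ['eth0', 'eth1'].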
class VMwareMissingHostException(Exception):
pass
class VMWareInventory(object):
__name__ = 'VMWareInventory'
guest_props = False
instances = []
debug = False
load_dumpfile = None
write_dumpfile = None
maxlevel = 1
lowerkeys = True
config = None
cache_max_age = None
cache_path_cache = None
cache_path_index = None
cache_dir = None
server = None
port = None
username = None
password = None
validate_certs = True
host_filters = []
skip_keys = []
groupby_patterns = []
safe_types = [bool, str, float, None] + list(integer_types)
iter_types = [dict, list]
bad_types = ['Array', 'disabledMethod', 'declaredAlarmState']
vimTableMaxDepth = {
"vim.HostSystem": 2,
"vim.VirtualMachine": 2,
}
custom_fields = {}
# use jinja environments to allow for custom filters
env = Environment()
env.filters['regex_match'] = regex_match
env.filters['select_chain_match'] = select_chain_match
# translation table for attributes to fetch for known vim types
vimTable = {
vim.Datastore: ['_moId', 'name'],
vim.ResourcePool: ['_moId', 'name'],
vim.HostSystem: ['_moId', 'name'],
}
@staticmethod
def _empty_inventory():
return {"_meta": {"hostvars": {}}}
def __init__(self, load=True):
self.inventory = VMWareInventory._empty_inventory()
if load:
# Read settings and parse CLI arguments
self.parse_cli_args()
self.read_settings()
# Check the cache
cache_valid = self.is_cache_valid()
# Handle Cache
if self.args.refresh_cache or not cache_valid:
self.do_api_calls_update_cache()
else:
self.debugl('loading inventory from cache')
self.inventory = self.get_inventory_from_cache()
def debugl(self, text):
if self.args.debug:
try:
text = str(text)
except UnicodeEncodeError:
text = text.encode('ascii', 'ignore')
print('%s %s' % (datetime.datetime.now(), text))
def show(self):
# Data to print
self.debugl('dumping results')
data_to_print = None
if self.args.host:
data_to_print = self.get_host_info(self.args.host)
elif self.args.list:
# Display list of instances for inventory
data_to_print = self.inventory
return json.dumps(data_to_print, indent=2)
def is_cache_valid(self):
''' Determines if the cache files have expired, or if it is still valid '''
valid = False
if os.path.isfile(self.cache_path_cache):
mod_time = os.path.getmtime(self.cache_path_cache)
current_time = time()
if (mod_time + self.cache_max_age) > current_time:
valid = True
return valid
def do_api_calls_update_cache(self):
''' Get instances and cache the data '''
self.inventory = self.instances_to_inventory(self.get_instances())
self.write_to_cache(self.inventory)
def write_to_cache(self, data):
''' Dump inventory to json file '''
        with open(self.cache_path_cache, 'wb') as f:
            # encode explicitly so the binary-mode write also works on python3
            f.write(json.dumps(data).encode('utf-8'))
def get_inventory_from_cache(self):
''' Read in jsonified inventory '''
jdata = None
with open(self.cache_path_cache, 'rb') as f:
jdata = f.read()
return json.loads(jdata)
def read_settings(self):
''' Reads the settings from the vmware_inventory.ini file '''
scriptbasename = __file__
scriptbasename = os.path.basename(scriptbasename)
scriptbasename = scriptbasename.replace('.py', '')
defaults = {'vmware': {
'server': '',
'port': 443,
'username': '',
'password': '',
'validate_certs': True,
'ini_path': os.path.join(os.path.dirname(__file__), '%s.ini' % scriptbasename),
'cache_name': 'ansible-vmware',
'cache_path': '~/.ansible/tmp',
'cache_max_age': 3600,
'max_object_level': 1,
'skip_keys': 'declaredalarmstate,'
'disabledmethod,'
'dynamicproperty,'
'dynamictype,'
'environmentbrowser,'
'managedby,'
'parent,'
'childtype,'
'resourceconfig',
'alias_pattern': '{{ config.name + "_" + config.uuid }}',
'host_pattern': '{{ guest.ipaddress }}',
'host_filters': '{{ runtime.powerstate == "poweredOn" }}',
'groupby_patterns': '{{ guest.guestid }},{{ "templates" if config.template else "guests"}}',
'lower_var_keys': True,
'custom_field_group_prefix': 'vmware_tag_',
'groupby_custom_field': False}
}
if six.PY3:
config = configparser.ConfigParser()
else:
config = configparser.SafeConfigParser()
# where is the config?
vmware_ini_path = os.environ.get('VMWARE_INI_PATH', defaults['vmware']['ini_path'])
vmware_ini_path = os.path.expanduser(os.path.expandvars(vmware_ini_path))
config.read(vmware_ini_path)
if 'vmware' not in config.sections():
config.add_section('vmware')
# apply defaults
for k, v in defaults['vmware'].items():
if not config.has_option('vmware', k):
config.set('vmware', k, str(v))
# where is the cache?
self.cache_dir = os.path.expanduser(config.get('vmware', 'cache_path'))
if self.cache_dir and not os.path.exists(self.cache_dir):
os.makedirs(self.cache_dir)
# set the cache filename and max age
cache_name = config.get('vmware', 'cache_name')
self.cache_path_cache = self.cache_dir + "/%s.cache" % cache_name
self.debugl('cache path is %s' % self.cache_path_cache)
self.cache_max_age = int(config.getint('vmware', 'cache_max_age'))
# mark the connection info
self.server = os.environ.get('VMWARE_SERVER', config.get('vmware', 'server'))
self.debugl('server is %s' % self.server)
self.port = int(os.environ.get('VMWARE_PORT', config.get('vmware', 'port')))
self.username = os.environ.get('VMWARE_USERNAME', config.get('vmware', 'username'))
self.debugl('username is %s' % self.username)
self.password = os.environ.get('VMWARE_PASSWORD', config.get('vmware', 'password', raw=True))
self.validate_certs = os.environ.get('VMWARE_VALIDATE_CERTS', config.get('vmware', 'validate_certs'))
if self.validate_certs in ['no', 'false', 'False', False]:
self.validate_certs = False
self.debugl('cert validation is %s' % self.validate_certs)
# behavior control
self.maxlevel = int(config.get('vmware', 'max_object_level'))
self.debugl('max object level is %s' % self.maxlevel)
self.lowerkeys = config.get('vmware', 'lower_var_keys')
if type(self.lowerkeys) != bool:
if str(self.lowerkeys).lower() in ['yes', 'true', '1']:
self.lowerkeys = True
else:
self.lowerkeys = False
self.debugl('lower keys is %s' % self.lowerkeys)
self.skip_keys = list(config.get('vmware', 'skip_keys').split(','))
self.debugl('skip keys is %s' % self.skip_keys)
temp_host_filters = list(config.get('vmware', 'host_filters').split('}},'))
for host_filter in temp_host_filters:
host_filter = host_filter.rstrip()
if host_filter != "":
if not host_filter.endswith("}}"):
host_filter += "}}"
self.host_filters.append(host_filter)
self.debugl('host filters are %s' % self.host_filters)
temp_groupby_patterns = list(config.get('vmware', 'groupby_patterns').split('}},'))
for groupby_pattern in temp_groupby_patterns:
groupby_pattern = groupby_pattern.rstrip()
if groupby_pattern != "":
if not groupby_pattern.endswith("}}"):
groupby_pattern += "}}"
self.groupby_patterns.append(groupby_pattern)
self.debugl('groupby patterns are %s' % self.groupby_patterns)
# Special feature to disable the brute force serialization of the
        # virtualmachine objects. The key name for these properties does not
# matter because the values are just items for a larger list.
if config.has_section('properties'):
self.guest_props = []
for prop in config.items('properties'):
self.guest_props.append(prop[1])
# save the config
self.config = config
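    # A minimal vmware_inventory.ini exercising the options read above;
    # server and credentials are hypothetical placeholders:
    #
    #   [vmware]
    #   server = vcenter.example.com
    #   username = administrator@vsphere.local
    #   password = secret
    #   validate_certs = False
    #   host_filters = {{ runtime.powerstate == "poweredOn" }}
    #   groupby_patterns = {{ guest.guestid }}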
def parse_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on PyVmomi')
parser.add_argument('--debug', action='store_true', default=False,
help='show debug info')
parser.add_argument('--list', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--host', action='store',
help='Get all the variables about a specific instance')
parser.add_argument('--refresh-cache', action='store_true', default=False,
help='Force refresh of cache by making API requests to VSphere (default: False - use cache files)')
parser.add_argument('--max-instances', default=None, type=int,
help='maximum number of instances to retrieve')
self.args = parser.parse_args()
def get_instances(self):
''' Get a list of vm instances with pyvmomi '''
kwargs = {'host': self.server,
'user': self.username,
'pwd': self.password,
'port': int(self.port)}
if hasattr(ssl, 'SSLContext') and not self.validate_certs:
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_NONE
kwargs['sslContext'] = context
return self._get_instances(kwargs)
def _get_instances(self, inkwargs):
''' Make API calls '''
instances = []
try:
si = SmartConnect(**inkwargs)
except ssl.SSLError as connection_error:
if '[SSL: CERTIFICATE_VERIFY_FAILED]' in str(connection_error) and self.validate_certs:
sys.exit("Unable to connect to ESXi server due to %s, "
"please specify validate_certs=False and try again" % connection_error)
except Exception as exc:
self.debugl("Unable to connect to ESXi server due to %s" % exc)
sys.exit("Unable to connect to ESXi server due to %s" % exc)
self.debugl('retrieving all instances')
if not si:
sys.exit("Could not connect to the specified host using specified "
"username and password")
atexit.register(Disconnect, si)
content = si.RetrieveContent()
# Create a search container for virtualmachines
self.debugl('creating containerview for virtualmachines')
container = content.rootFolder
viewType = [vim.VirtualMachine]
recursive = True
containerView = content.viewManager.CreateContainerView(container, viewType, recursive)
children = containerView.view
for child in children:
# If requested, limit the total number of instances
if self.args.max_instances:
if len(instances) >= self.args.max_instances:
break
instances.append(child)
self.debugl("%s total instances in container view" % len(instances))
if self.args.host:
instances = [x for x in instances if x.name == self.args.host]
instance_tuples = []
for instance in sorted(instances):
if self.guest_props:
ifacts = self.facts_from_proplist(instance)
else:
ifacts = self.facts_from_vobj(instance)
instance_tuples.append((instance, ifacts))
self.debugl('facts collected for all instances')
try:
cfm = content.customFieldsManager
if cfm is not None and cfm.field:
for f in cfm.field:
if f.managedObjectType == vim.VirtualMachine:
self.custom_fields[f.key] = f.name
self.debugl('%d custom fields collected' % len(self.custom_fields))
except vmodl.RuntimeFault as exc:
self.debugl("Unable to gather custom fields due to %s" % exc.msg)
except IndexError as exc:
self.debugl("Unable to gather custom fields due to %s" % exc)
return instance_tuples
def instances_to_inventory(self, instances):
''' Convert a list of vm objects into a json compliant inventory '''
self.debugl('re-indexing instances based on ini settings')
inventory = VMWareInventory._empty_inventory()
inventory['all'] = {}
inventory['all']['hosts'] = []
for idx, instance in enumerate(instances):
# make a unique id for this object to avoid vmware's
# numerous uuid's which aren't all unique.
thisid = str(uuid.uuid4())
idata = instance[1]
# Put it in the inventory
inventory['all']['hosts'].append(thisid)
inventory['_meta']['hostvars'][thisid] = idata.copy()
inventory['_meta']['hostvars'][thisid]['ansible_uuid'] = thisid
# Make a map of the uuid to the alias the user wants
name_mapping = self.create_template_mapping(
inventory,
self.config.get('vmware', 'alias_pattern')
)
# Make a map of the uuid to the ssh hostname the user wants
host_mapping = self.create_template_mapping(
inventory,
self.config.get('vmware', 'host_pattern')
)
# Reset the inventory keys
for k, v in name_mapping.items():
if not host_mapping or k not in host_mapping:
continue
# set ansible_host (2.x)
try:
inventory['_meta']['hostvars'][k]['ansible_host'] = host_mapping[k]
# 1.9.x backwards compliance
inventory['_meta']['hostvars'][k]['ansible_ssh_host'] = host_mapping[k]
except Exception:
continue
if k == v:
continue
# add new key
inventory['all']['hosts'].append(v)
inventory['_meta']['hostvars'][v] = inventory['_meta']['hostvars'][k]
# cleanup old key
inventory['all']['hosts'].remove(k)
inventory['_meta']['hostvars'].pop(k, None)
self.debugl('pre-filtered hosts:')
for i in inventory['all']['hosts']:
self.debugl(' * %s' % i)
# Apply host filters
for hf in self.host_filters:
if not hf:
continue
self.debugl('filter: %s' % hf)
filter_map = self.create_template_mapping(inventory, hf, dtype='boolean')
for k, v in filter_map.items():
if not v:
# delete this host
inventory['all']['hosts'].remove(k)
inventory['_meta']['hostvars'].pop(k, None)
self.debugl('post-filter hosts:')
for i in inventory['all']['hosts']:
self.debugl(' * %s' % i)
# Create groups
for gbp in self.groupby_patterns:
groupby_map = self.create_template_mapping(inventory, gbp)
for k, v in groupby_map.items():
if v not in inventory:
inventory[v] = {}
inventory[v]['hosts'] = []
if k not in inventory[v]['hosts']:
inventory[v]['hosts'].append(k)
if self.config.get('vmware', 'groupby_custom_field'):
for k, v in inventory['_meta']['hostvars'].items():
if 'customvalue' in v:
for tv in v['customvalue']:
newkey = None
field_name = self.custom_fields[tv['key']] if tv['key'] in self.custom_fields else tv['key']
values = []
keylist = map(lambda x: x.strip(), tv['value'].split(','))
for kl in keylist:
try:
newkey = "%s%s_%s" % (self.config.get('vmware', 'custom_field_group_prefix'), str(field_name), kl)
newkey = newkey.strip()
except Exception as e:
self.debugl(e)
values.append(newkey)
for tag in values:
if not tag:
continue
if tag not in inventory:
inventory[tag] = {}
inventory[tag]['hosts'] = []
if k not in inventory[tag]['hosts']:
inventory[tag]['hosts'].append(k)
return inventory
def create_template_mapping(self, inventory, pattern, dtype='string'):
''' Return a hash of uuid to templated string from pattern '''
mapping = {}
for k, v in inventory['_meta']['hostvars'].items():
t = self.env.from_string(pattern)
newkey = None
try:
newkey = t.render(v)
newkey = newkey.strip()
except Exception as e:
self.debugl(e)
if not newkey:
continue
elif dtype == 'integer':
newkey = int(newkey)
elif dtype == 'boolean':
if newkey.lower() == 'false':
newkey = False
elif newkey.lower() == 'true':
newkey = True
elif dtype == 'string':
pass
mapping[k] = newkey
return mapping
def facts_from_proplist(self, vm):
'''Get specific properties instead of serializing everything'''
rdata = {}
for prop in self.guest_props:
self.debugl('getting %s property for %s' % (prop, vm.name))
key = prop
if self.lowerkeys:
key = key.lower()
if '.' not in prop:
# props without periods are direct attributes of the parent
vm_property = getattr(vm, prop)
if isinstance(vm_property, vim.CustomFieldsManager.Value.Array):
temp_vm_property = []
for vm_prop in vm_property:
temp_vm_property.append({'key': vm_prop.key,
'value': vm_prop.value})
rdata[key] = temp_vm_property
else:
rdata[key] = vm_property
else:
# props with periods are subkeys of parent attributes
parts = prop.split('.')
total = len(parts) - 1
# pointer to the current object
val = None
# pointer to the current result key
lastref = rdata
for idx, x in enumerate(parts):
if isinstance(val, dict):
if x in val:
val = val.get(x)
elif x.lower() in val:
val = val.get(x.lower())
else:
# if the val wasn't set yet, get it from the parent
if not val:
try:
val = getattr(vm, x)
except AttributeError as e:
self.debugl(e)
else:
# in a subkey, get the subprop from the previous attrib
try:
val = getattr(val, x)
except AttributeError as e:
self.debugl(e)
# make sure it serializes
val = self._process_object_types(val)
# lowercase keys if requested
if self.lowerkeys:
x = x.lower()
# change the pointer or set the final value
if idx != total:
if x not in lastref:
lastref[x] = {}
lastref = lastref[x]
else:
lastref[x] = val
return rdata
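    # Example walk for a dotted property: 'config.hardware.memoryMB' resolves
    # as vm.config -> .hardware -> .memoryMB (standard pyVmomi attributes),
    # and the serialized leaf is stored under
    # rdata['config']['hardware']['memorymb'] when lower_var_keys is enabled.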
def facts_from_vobj(self, vobj, level=0):
''' Traverse a VM object and return a json compliant data structure '''
# pyvmomi objects are not yet serializable, but may be one day ...
# https://github.com/vmware/pyvmomi/issues/21
# WARNING:
# Accessing an object attribute will trigger a SOAP call to the remote.
# Increasing the attributes collected or the depth of recursion greatly
# increases runtime duration and potentially memory+network utilization.
if level == 0:
try:
self.debugl("get facts for %s" % vobj.name)
except Exception as e:
self.debugl(e)
rdata = {}
methods = dir(vobj)
methods = [str(x) for x in methods if not x.startswith('_')]
methods = [x for x in methods if x not in self.bad_types]
methods = [x for x in methods if not x.lower() in self.skip_keys]
methods = sorted(methods)
for method in methods:
# Attempt to get the method, skip on fail
try:
methodToCall = getattr(vobj, method)
except Exception as e:
continue
# Skip callable methods
if callable(methodToCall):
continue
if self.lowerkeys:
method = method.lower()
rdata[method] = self._process_object_types(
methodToCall,
thisvm=vobj,
inkey=method,
)
return rdata
def _process_object_types(self, vobj, thisvm=None, inkey='', level=0):
''' Serialize an object '''
rdata = {}
if type(vobj).__name__ in self.vimTableMaxDepth and level >= self.vimTableMaxDepth[type(vobj).__name__]:
return rdata
if vobj is None:
rdata = None
elif type(vobj) in self.vimTable:
rdata = {}
for key in self.vimTable[type(vobj)]:
try:
rdata[key] = getattr(vobj, key)
except Exception as e:
self.debugl(e)
elif issubclass(type(vobj), str) or isinstance(vobj, str):
            if vobj.isalnum():
                rdata = vobj
            else:
                try:
                    rdata = vobj.decode('ascii', 'ignore')
                except AttributeError:
                    # python3 str has no decode(); strip non-ascii instead
                    rdata = vobj.encode('ascii', 'ignore').decode('ascii')
elif issubclass(type(vobj), bool) or isinstance(vobj, bool):
rdata = vobj
elif issubclass(type(vobj), integer_types) or isinstance(vobj, integer_types):
rdata = vobj
elif issubclass(type(vobj), float) or isinstance(vobj, float):
rdata = vobj
elif issubclass(type(vobj), list) or issubclass(type(vobj), tuple):
rdata = []
try:
vobj = sorted(vobj)
except Exception:
pass
for idv, vii in enumerate(vobj):
if level + 1 <= self.maxlevel:
vid = self._process_object_types(
vii,
thisvm=thisvm,
inkey=inkey + '[' + str(idv) + ']',
level=(level + 1)
)
if vid:
rdata.append(vid)
elif issubclass(type(vobj), dict):
pass
elif issubclass(type(vobj), object):
methods = dir(vobj)
methods = [str(x) for x in methods if not x.startswith('_')]
methods = [x for x in methods if x not in self.bad_types]
methods = [x for x in methods if not inkey + '.' + x.lower() in self.skip_keys]
methods = sorted(methods)
for method in methods:
# Attempt to get the method, skip on fail
try:
methodToCall = getattr(vobj, method)
except Exception as e:
continue
if callable(methodToCall):
continue
if self.lowerkeys:
method = method.lower()
if level + 1 <= self.maxlevel:
try:
rdata[method] = self._process_object_types(
methodToCall,
thisvm=thisvm,
inkey=inkey + '.' + method,
level=(level + 1)
)
except vim.fault.NoPermission:
self.debugl("Skipping method %s (NoPermission)" % method)
else:
pass
return rdata
def get_host_info(self, host):
''' Return hostvars for a single host '''
if host in self.inventory['_meta']['hostvars']:
return self.inventory['_meta']['hostvars'][host]
elif self.args.host and self.inventory['_meta']['hostvars']:
match = None
for k, v in self.inventory['_meta']['hostvars'].items():
if self.inventory['_meta']['hostvars'][k]['name'] == self.args.host:
match = k
break
if match:
return self.inventory['_meta']['hostvars'][match]
else:
raise VMwareMissingHostException('%s not found' % host)
else:
raise VMwareMissingHostException('%s not found' % host)
if __name__ == "__main__":
# Run the script
print(VMWareInventory().show())
| GheRivero/ansible | contrib/inventory/vmware_inventory.py | Python | gpl-3.0 | 28,632 |
# -*- coding: utf-8 -*-
import argparse
import logging
import requests
import sys
import os
import glob
import imp
import inspect
from dogen.generator import Generator
from dogen.version import version
from dogen.errors import Error
from dogen.plugin import Plugin
import colorlog
# Source: http://stackoverflow.com/questions/1383254/logging-streamhandler-and-standard-streams
# Adjusted
class SingleLevelFilter(logging.Filter):
def __init__(self, passlevel, reject):
self.passlevel = passlevel
self.reject = reject
def filter(self, record):
if self.reject:
return (record.levelno > self.passlevel)
else:
return (record.levelno <= self.passlevel)
class MyParser(argparse.ArgumentParser):
def error(self, message):
self.print_help()
sys.stderr.write('\nError: %s\n' % message)
sys.exit(2)
class CLI(object):
def __init__(self):
handler_out = logging.StreamHandler(sys.stdout)
handler_err = logging.StreamHandler(sys.stderr)
handler_out.addFilter(SingleLevelFilter(logging.INFO, False))
handler_err.addFilter(SingleLevelFilter(logging.INFO, True))
formatter = colorlog.ColoredFormatter(
'%(log_color)s%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
handler_out.setFormatter(formatter)
handler_err.setFormatter(formatter)
self.log = logging.getLogger("dogen")
self.log.addHandler(handler_out)
self.log.addHandler(handler_err)
for package in ["requests.packages.urllib3", "pykwalify.rule"]:
log = logging.getLogger(package)
log.setLevel(logging.INFO)
requests.packages.urllib3.disable_warnings()
def run(self):
parser = MyParser(
description='Dockerfile generator tool', formatter_class=argparse.RawDescriptionHelpFormatter)
epilog = "List of available plugins:\n"
plugins = self.get_plugins()
parser.add_argument(
'-v', '--verbose', action='store_true', help='Verbose output')
parser.add_argument(
'--version', action='version', help='Show version and exit', version=version)
parser.add_argument('--without-sources', '--ws', action='store_true', help='Do not process sources, only generate Dockerfile')
parser.add_argument('--skip-ssl-verification', action='store_true', help='Should we skip SSL verification when retrieving data?')
parser.add_argument('--scripts-path', help='Location of the scripts directory containing script packages.')
parser.add_argument('--template', help='Path to custom template (can be url)')
parser.add_argument('path', help="Path to yaml descriptor to process")
parser.add_argument('output', help="Path to directory where generated files should be saved")
for plugin in plugins:
key, description = plugins[plugin].info()
epilog += "\n * %s:\t%s" % (key, description)
parser = plugins[plugin].inject_args(parser)
parser.epilog = epilog
args = parser.parse_args()
if args.verbose:
self.log.setLevel(logging.DEBUG)
else:
self.log.setLevel(logging.INFO)
self.log.debug("Running version %s", version)
enabled_plugins = []
for plugin in plugins:
enabled_plugins.append(plugins[plugin])
try:
Generator(self.log, args=args, plugins=enabled_plugins).run()
except KeyboardInterrupt as e:
pass
except Error as e:
if args.verbose:
self.log.exception(e)
else:
self.log.error(str(e))
sys.exit(1)
def get_plugins(self):
"""
        Finds all plugin classes in modules of the "plugins" directory
"""
modules = {}
directory = os.path.join(os.path.dirname(__file__), "plugins")
for candidate in glob.glob(directory + os.sep + "*py"):
self.log.debug("inspecting %s" %candidate)
module_name = "dogen.plugins"
self.log.debug("importing module %s to %s" % (os.path.abspath(candidate), module_name))
module = imp.load_source(module_name, os.path.abspath(candidate))
# Get all classes from our module
for name, clazz in inspect.getmembers(module, inspect.isclass):
# Check that class is from our namespace
if module_name == clazz.__module__:
# Instantiate class
cls = getattr(module, name)
if issubclass(cls, Plugin):
self.log.info("found %s" %cls)
modules[cls.__name__] = cls
return modules
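    # Sketch of a minimal plugin that the discovery above would pick up,
    # e.g. in a hypothetical dogen/plugins/example.py (the exact Plugin API
    # may differ):
    #
    #   from dogen.plugin import Plugin
    #
    #   class Example(Plugin):
    #       @staticmethod
    #       def info():
    #           return "example", "Example plugin that does nothing"
    #
    #       @classmethod
    #       def inject_args(cls, parser):
    #           return parser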
def run():
cli = CLI()
cli.run()
if __name__ == "__main__":
run()
| goldmann/dogen | dogen/cli.py | Python | mit | 4,867 |
# Based on lazy pirate pattern
import zmq
import sys
import numpy
def recv_array(socket, flags=0, copy=True, track=False):
"""recv a numpy array"""
md = socket.recv_json(flags=flags)
msg = socket.recv(flags=flags, copy=copy, track=track)
buf = buffer(msg)
A = numpy.frombuffer(buf, dtype=md['dtype'])
return A.reshape(md['shape'])
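# Sketch of the matching sender for recv_array above (not used by this
# client); it mirrors the standard pyzmq recipe of a JSON metadata frame
# followed by the raw array buffer.
def send_array(socket, A, flags=0, copy=True, track=False):
    """send a numpy array with dtype/shape metadata"""
    md = dict(dtype=str(A.dtype), shape=A.shape)
    socket.send_json(md, flags | zmq.SNDMORE)
    return socket.send(A, flags, copy=copy, track=track)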
REQUEST_TIMEOUT = 2500
REQUEST_RETRIES = 3
SERVER_ENDPOINT = "tcp://localhost:5555"
context = zmq.Context()
# Socket to talk to server
print "Connecting to hello world server..."
client = context.socket(zmq.REQ)
client.connect(SERVER_ENDPOINT)
poll = zmq.Poller()
poll.register(client, zmq.POLLIN)
retries_left = REQUEST_RETRIES
while retries_left:
shots_requested = 1
request = str(shots_requested) # ask for one shot of data
print "I: Sending (%s)" % request
client.send(request)
expect_reply = True
while expect_reply:
socks = dict(poll.poll(REQUEST_TIMEOUT))
if socks.get(client) == zmq.POLLIN:
reply = client.recv()
if not reply:
break
if int(reply) == shots_requested:
print "I: Server replied OK (%s)" % reply
retries_left = REQUEST_RETRIES
expect_reply = False
else:
print "E: Malformed reply from server: %s" % reply
else:
print "W: No response from server, retrying..."
# Socket is confused. Close and remove it.
client.setsockopt(zmq.LINGER, 0)
client.close()
poll.unregister(client)
retries_left -= 1
if retries_left == 0:
print "E: Server seems to be offline, abandoning"
break
print "I: Reconnecting and resending (%s)" % request
# Create new connection
client = context.socket(zmq.REQ)
client.connect(SERVER_ENDPOINT)
poll.register(client, zmq.POLLIN)
client.send(request)
context.term()
| DawesLab/Camserver | TestClient.py | Python | mit | 2,036 |
"""add table mailtemplates
Revision ID: 34f15d11d02
Revises: 2fadbf7a01a
Create Date: 2015-08-04 17:28:24.372803
"""
# revision identifiers, used by Alembic.
revision = '34f15d11d02'
down_revision = '2fadbf7a01a'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('mailtemplates',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('subject', sa.String(length=79), nullable=False),
sa.Column('html', sa.Text(), nullable=False),
sa.Column('help_msg', sa.Text(), nullable=True),
sa.Column('updated_at', sa.Date(), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('mailtemplates')
### end Alembic commands ###
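# To apply or roll back this revision with the alembic CLI:
#   alembic upgrade 34f15d11d02
#   alembic downgrade 2fadbf7a01a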
| uaprom-summer-2015/Meowth | migrations/versions/2015_08_04_34f1_add_table_mailtemplates.py | Python | bsd-3-clause | 1,045 |
# -*- coding: utf-8 -*-
"""Building and simulating spiking neural networks using Brian2.
@author: rbodo
"""
import warnings
import numpy as np
import os
from tensorflow.keras.models import load_model
from snntoolbox.parsing.utils import get_type
from snntoolbox.simulation.utils import AbstractSNN, get_shape_from_label, \
build_convolution, build_pooling, get_ann_ops
from snntoolbox.utils.utils import confirm_overwrite
class SNN(AbstractSNN):
"""
Represents the compiled spiking neural network, ready for testing in a
spiking simulator.
Attributes
----------
layers: list[brian2.NeuronGroup]
        Each entry represents a layer, i.e. a population of neurons, in the
        form of Brian2 ``NeuronGroup`` objects.
connections: list[brian2.Synapses]
Brian2 ``Synapses`` objects representing the connections between
individual layers.
threshold: str
Defines spiking threshold.
v_reset: str
Defines reset potential.
eqs: str
Differential equation for membrane potential.
spikemonitors: list[brian2.SpikeMonitor]
Brian2 ``SpikeMonitor`` s for each layer that records spikes.
statemonitors: list[brian2.StateMonitor]
Brian2 ``StateMonitor`` s for each layer that records membrane
potential.
snn: brian2.Network
The spiking network.
"""
def __init__(self, config, queue=None):
AbstractSNN.__init__(self, config, queue)
self.layers = []
        self.connections = []  # Synapses connecting consecutive layers.
self.threshold = 'v >= v_thresh'
if 'subtraction' in config.get('cell', 'reset'):
self.v_reset = 'v = v - v_thresh'
else:
self.v_reset = 'v = v_reset'
self.eqs = '''dv/dt = bias : 1
bias : hertz'''
self.spikemonitors = []
self.statemonitors = []
self.snn = None
self._input_layer = None
self._cell_params = None
# Track the output layer spikes.
self.output_spikemonitor = None
@property
def is_parallelizable(self):
return False
def add_input_layer(self, input_shape):
if self._poisson_input:
self.layers.append(self.sim.PoissonGroup(
np.prod(input_shape[1:]), rates=0*self.sim.Hz,
dt=self._dt*self.sim.ms))
else:
self.layers.append(self.sim.NeuronGroup(
np.prod(input_shape[1:]), model=self.eqs, method='euler',
reset=self.v_reset, threshold=self.threshold,
dt=self._dt * self.sim.ms))
self.layers[0].add_attribute('label')
self.layers[0].label = 'InputLayer'
self.spikemonitors.append(self.sim.SpikeMonitor(self.layers[0]))
# Need placeholders "None" for layers without states:
self.statemonitors.append(self.sim.StateMonitor(self.layers[0], [],
False))
def add_layer(self, layer):
# Latest Keras versions need special permutation after Flatten layers.
if 'Flatten' in layer.__class__.__name__ and \
self.config.get('input', 'model_lib') == 'keras':
self.flatten_shapes.append(
(layer.name, get_shape_from_label(self.layers[-1].label)))
return
self.layers.append(self.sim.NeuronGroup(
np.prod(layer.output_shape[1:]), model=self.eqs, method='euler',
reset=self.v_reset, threshold=self.threshold,
dt=self._dt * self.sim.ms))
self.connections.append(self.sim.Synapses(
self.layers[-2], self.layers[-1], 'w:1', on_pre='v+=w',
dt=self._dt * self.sim.ms))
self.layers[-1].add_attribute('label')
self.layers[-1].label = layer.name
if 'spiketrains' in self._plot_keys \
or 'spiketrains_n_b_l_t' in self._log_keys:
self.spikemonitors.append(self.sim.SpikeMonitor(self.layers[-1]))
if 'v_mem' in self._plot_keys or 'mem_n_b_l_t' in self._log_keys:
self.statemonitors.append(self.sim.StateMonitor(self.layers[-1],
'v', True))
def build_dense(self, layer, weights=None):
        # warn (don't raise: warnings.warn returns None) and compare the
        # activation function by name rather than against the function object
        if layer.activation.__name__ == 'softmax':
            warnings.warn("Activation 'softmax' not implemented. Using "
                          "'relu' activation instead.", RuntimeWarning)
_weights, biases = layer.get_weights()
if weights is None:
weights = _weights
self.set_biases(biases)
delay = self.config.getfloat('cell', 'delay')
connections = []
if len(self.flatten_shapes) == 1:
print("Swapping data_format of Flatten layer.")
flatten_name, shape = self.flatten_shapes.pop()
if self.data_format == 'channels_last':
y_in, x_in, f_in = shape
else:
f_in, y_in, x_in = shape
for i in range(weights.shape[0]): # Input neurons
# Sweep across channel axis of feature map. Assumes that each
# consecutive input neuron lies in a different channel. This is
# the case for channels_last, but not for channels_first.
f = i % f_in
# Sweep across height of feature map. Increase y by one if all
# rows along the channel axis were seen.
y = i // (f_in * x_in)
# Sweep across width of feature map.
x = (i // f_in) % x_in
new_i = f * x_in * y_in + x_in * y + x
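                # Worked example with a hypothetical shape (y_in, x_in, f_in)
                # = (2, 2, 3): channels_last index i = 4 decodes to f=1, y=0,
                # x=1, giving new_i = 1*2*2 + 2*0 + 1 = 5, the channels_first
                # position of the same neuron.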
for j in range(weights.shape[1]): # Output neurons
connections.append((new_i, j, weights[i, j], delay))
elif len(self.flatten_shapes) > 1:
raise RuntimeWarning("Not all Flatten layers have been consumed.")
else:
for i in range(weights.shape[0]):
for j in range(weights.shape[1]):
connections.append((i, j, weights[i, j], delay))
connections = np.array(connections)
self.connections[-1].connect(i=connections[:, 0].astype('int64'),
j=connections[:, 1].astype('int64'))
self.connections[-1].w = connections[:, 2]
def build_convolution(self, layer, weights=None):
delay = self.config.getfloat('cell', 'delay')
transpose_kernel = \
self.config.get('simulation', 'keras_backend') == 'tensorflow'
conns, biases = build_convolution(layer, delay, transpose_kernel)
connections = np.array(conns)
self.set_biases(biases)
print("Connecting layer...")
self.connections[-1].connect(i=connections[:, 0].astype('int64'),
j=connections[:, 1].astype('int64'))
w = connections[:, 2] if weights is None else weights.flatten()
self.connections[-1].w = w
def build_pooling(self, layer, weights=None):
delay = self.config.getfloat('cell', 'delay')
connections = np.array(build_pooling(layer, delay))
self.connections[-1].connect(i=connections[:, 0].astype('int64'),
j=connections[:, 1].astype('int64'))
w = connections[:, 2] if weights is None else weights.flatten()
self.connections[-1].w = w
def compile(self):
self.output_spikemonitor = self.sim.SpikeMonitor(self.layers[-1])
spikemonitors = self.spikemonitors + [self.output_spikemonitor]
self.snn = self.sim.Network(self.layers, self.connections,
spikemonitors, self.statemonitors)
self.snn.store()
# Set input layer
for obj in self.snn.objects:
if hasattr(obj, 'label') and obj.label == 'InputLayer':
self._input_layer = obj
assert self._input_layer, "No input layer found."
def simulate(self, **kwargs):
inputs = kwargs[str('x_b_l')].flatten() / self.sim.ms
if self._poisson_input:
self._input_layer.rates = inputs / self.rescale_fac
elif self._is_aedat_input:
# TODO: Implement by using brian2.SpikeGeneratorGroup.
raise NotImplementedError
else:
self._input_layer.bias = inputs
self.snn.run(self._duration * self.sim.ms, namespace=self._cell_params,
report='stdout', report_period=10 * self.sim.ms)
output_b_l_t = self.get_recorded_vars(self.layers)
return output_b_l_t
def reset(self, sample_idx):
mod = self.config.getint('simulation', 'reset_between_nth_sample')
mod = mod if mod else sample_idx + 1
if sample_idx % mod == 0:
print("Resetting simulator...")
self.snn.restore()
def end_sim(self):
pass
def save(self, path, filename):
print("Saving weights ...")
for i, connection in enumerate(self.connections):
filepath = os.path.join(path,
self.config.get('paths', 'filename_snn'),
'brian2-model',
self.layers[i + 1].label + '.npz')
if self.config.getboolean('output', 'overwrite') \
or confirm_overwrite(filepath):
directory = os.path.dirname(filepath)
if not os.path.exists(directory):
os.makedirs(directory)
print("Store weights of layer {} to file {}".format(
self.layers[i + 1].label, filepath))
np.savez(filepath, self.connections[i].w)
def load(self, path, filename):
dirpath = os.path.join(path, filename, 'brian2-model')
npz_files = [f for f in sorted(os.listdir(dirpath))
if os.path.isfile(os.path.join(dirpath, f))]
print("Loading spiking model...")
self.parsed_model = load_model(
os.path.join(self.config.get('paths', 'path_wd'),
self.config.get('paths',
'filename_parsed_model') + '.h5'))
self.num_classes = int(self.parsed_model.layers[-1].output_shape[-1])
self.top_k = min(self.num_classes, self.config.getint('simulation',
'top_k'))
# Get batch input shape
batch_shape = list(self.parsed_model.layers[0].batch_input_shape)
batch_shape[0] = self.batch_size
if self.config.get('conversion', 'spike_code') == 'ttfs_dyn_thresh':
batch_shape[0] *= 2
self.add_input_layer(batch_shape)
# Iterate over layers to create spiking neurons and connections.
for layer, f in zip(self.parsed_model.layers[1:], npz_files):
print("Building layer: {}".format(layer.name))
self.add_layer(layer)
layer_type = get_type(layer)
filepath = os.path.join(dirpath, f)
print("Using layer-weights stored in: {}".format(filepath))
print("Loading stored weights...")
input_file = np.load(filepath)
weights = input_file['arr_0']
if layer_type == 'Dense':
self.build_dense(layer, weights=weights)
elif layer_type == 'Conv2D':
self.build_convolution(layer, weights=weights)
if layer.data_format == 'channels_last':
self.data_format = layer.data_format
elif layer_type in {'MaxPooling2D', 'AveragePooling2D'}:
self.build_pooling(layer, weights=weights)
elif layer_type == 'Flatten':
self.build_flatten(layer)
print("Compiling spiking model...\n")
self.compile()
# Compute number of operations of ANN.
if self.fanout is None:
self.set_connectivity()
self.operations_ann = get_ann_ops(self.num_neurons,
self.num_neurons_with_bias,
self.fanin)
print("Number of operations of ANN: {}".format(
self.operations_ann))
print("Number of neurons: {}".format(sum(self.num_neurons[1:])))
print("Number of synapses: {}\n".format(self.num_synapses))
self.is_built = True
def init_cells(self):
self._cell_params = {
'v_thresh': self.config.getfloat('cell', 'v_thresh'),
'v_reset': self.config.getfloat('cell', 'v_reset'),
'tau_m': self.config.getfloat('cell', 'tau_m') * self.sim.ms}
def get_spiketrains(self, **kwargs):
j = self._spiketrains_container_counter
if self.spiketrains_n_b_l_t is None or \
j >= len(self.spiketrains_n_b_l_t):
return None
shape = self.spiketrains_n_b_l_t[j][0].shape
# Outer for-loop that calls this function starts with
# 'monitor_index' = 0, but this is reserved for the input and handled
# by `get_spiketrains_input()`.
i = len(self.spikemonitors) - 1 if kwargs[str('monitor_index')] == -1 \
else kwargs[str('monitor_index')] + 1
spiketrain_dict = self.spikemonitors[i].spike_trains()
spiketrains_flat = np.array([spiketrain_dict[key] / self.sim.ms for key
in spiketrain_dict.keys()])
spiketrains_b_l_t = \
self.reshape_flattened_spiketrains(spiketrains_flat, shape)
return spiketrains_b_l_t
def get_spiketrains_input(self):
shape = list(self.parsed_model.input_shape) + [self._num_timesteps]
spiketrain_dict = self.spikemonitors[0].spike_trains()
spiketrains_flat = np.array([spiketrain_dict[key] / self.sim.ms for key
in spiketrain_dict.keys()])
spiketrains_b_l_t = \
self.reshape_flattened_spiketrains(spiketrains_flat, shape)
return spiketrains_b_l_t
def get_spiketrains_output(self):
shape = [self.batch_size, self.num_classes, self._num_timesteps]
spiketrain_dict = self.output_spikemonitor.spike_trains()
spiketrains_flat = np.array([spiketrain_dict[key] / self.sim.ms for key
in spiketrain_dict.keys()])
spiketrains_b_l_t = \
self.reshape_flattened_spiketrains(spiketrains_flat, shape)
return spiketrains_b_l_t
def get_vmem(self, **kwargs):
j = kwargs[str('monitor_index')]
if j >= len(self.statemonitors):
return None
try:
return np.array([
np.array(v).transpose() for v in self.statemonitors[j].v])
except AttributeError:
return None
def set_spiketrain_stats_input(self):
AbstractSNN.set_spiketrain_stats_input(self)
def set_biases(self, biases):
"""Set biases."""
if any(biases):
assert self.layers[-1].bias.shape == biases.shape, \
"Shape of biases and network do not match."
self.layers[-1].bias = biases / self.sim.ms
| NeuromorphicProcessorProject/snn_toolbox | snntoolbox/simulation/target_simulators/brian2_target_sim.py | Python | mit | 15,271 |
import argparse
import jenkins
from rdoutils import jenkins_utils
def parse_args():
    parser = argparse.ArgumentParser(description='Get status of jenkins job '
                                     'running in ci.centos.org')
parser.add_argument('-j', '--job-name', dest='job_name', required=True,
help='Name of the job to get status')
parser.add_argument('-n', '--number', dest='number', type=int,
required=True,
help='Build number to get status')
parser.add_argument('-r', '--result-only', dest='result_only',
action='store_true', default=False,
                        help='Show only the result of the job')
parser.add_argument('-u', '--url', dest='url',
type=str, default='rdo',
help='URL of jenkins server')
return parser.parse_args()
def main():
args = parse_args()
server = jenkins_utils.get_jenkins_client(args.url)
try:
job = jenkins_utils.get_build_info(server, args.job_name, args.number)
if args.result_only:
print(job['result'])
else:
jenkins_utils.print_build_info(job)
except jenkins.NotFoundException:
print("Job %s number %s does not exist" % (args.job_name, args.number))
| rdo-infra/releng | rdoutils/cmd/get_jenkins_job.py | Python | apache-2.0 | 1,329 |
"""
Copyright 2008-2015 Free Software Foundation, Inc.
This file is part of GNU Radio
SPDX-License-Identifier: GPL-2.0-or-later
"""
import sys
import re
import subprocess
import threading
import json
import random
import itertools
import six
from six.moves import queue, filter, range
###############################################################################
# The docstring extraction
###############################################################################
def docstring_guess_from_key(key):
"""
Extract the documentation from the python __doc__ strings
By guessing module and constructor names from key
Args:
key: the block key
Returns:
a dict (block_name --> doc string)
"""
doc_strings = dict()
in_tree = [key.partition('_')[::2] + (
lambda package: getattr(__import__('gnuradio.' + package), package),
)]
key_parts = key.split('_')
oot = [
('_'.join(key_parts[:i]), '_'.join(key_parts[i:]), __import__)
for i in range(1, len(key_parts))
]
for module_name, init_name, importer in itertools.chain(in_tree, oot):
if not module_name or not init_name:
continue
try:
module = importer(module_name)
break
except ImportError:
continue
else:
return doc_strings
pattern = re.compile('^' + init_name.replace('_', '_*').replace('x', r'\w') + r'\w*$')
for match in filter(pattern.match, dir(module)):
try:
doc_strings[match] = getattr(module, match).__doc__
except AttributeError:
continue
return doc_strings
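# Hypothetical walk-through: for key 'blocks_add_xx' the in-tree candidate is
# module 'blocks' with constructor guess 'add_xx'; underscores become '_*' and
# 'x' becomes '\w', so the pattern '^add_*\w\w\w*$' matches blocks.add_cc,
# blocks.add_ff, etc., and their docstrings are collected.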
def docstring_from_make(key, imports, make):
"""
Extract the documentation from the python __doc__ strings
By importing it and checking a truncated make
Args:
key: the block key
imports: a list of import statements (string) to execute
make: block constructor template
Returns:
        a dict (block_name --> doc string)
"""
try:
blk_cls = make.partition('(')[0].strip()
if '$' in blk_cls:
raise ValueError('Not an identifier')
ns = dict()
exec(imports.strip(), ns)
blk = eval(blk_cls, ns)
doc_strings = {key: blk.__doc__}
except (ImportError, AttributeError, SyntaxError, ValueError):
doc_strings = docstring_guess_from_key(key)
return doc_strings
###############################################################################
# Manage docstring extraction in separate process
###############################################################################
class SubprocessLoader(object):
"""
Start and manage docstring extraction process
Manages subprocess and handles RPC.
"""
BOOTSTRAP = "import runpy; runpy.run_path({!r}, run_name='__worker__')"
    AUTH_CODE = random.random()  # token to filter out unrelated worker output
RESTART = 5 # number of worker restarts before giving up
DONE = object() # sentinel value to signal end-of-queue
def __init__(self, callback_query_result, callback_finished=None):
self.callback_query_result = callback_query_result
self.callback_finished = callback_finished or (lambda: None)
self._queue = queue.Queue()
self._thread = None
self._worker = None
self._shutdown = threading.Event()
self._last_cmd = None
def start(self):
""" Start the worker process handler thread """
if self._thread is not None:
return
self._shutdown.clear()
thread = self._thread = threading.Thread(target=self.run_worker)
thread.daemon = True
thread.start()
def run_worker(self):
""" Read docstring back from worker stdout and execute callback. """
for _ in range(self.RESTART):
if self._shutdown.is_set():
break
try:
self._worker = subprocess.Popen(
args=(sys.executable, '-uc', self.BOOTSTRAP.format(__file__)),
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
self._handle_worker()
except (OSError, IOError):
msg = "Warning: restarting the docstring loader"
cmd, args = self._last_cmd
if cmd == 'query':
msg += " (crashed while loading {0!r})".format(args[0])
print(msg, file=sys.stderr)
continue # restart
else:
break # normal termination, return
finally:
if self._worker:
self._worker.stdin.close()
self._worker.stdout.close()
self._worker.stderr.close()
self._worker.terminate()
self._worker.wait()
else:
print("Warning: docstring loader crashed too often", file=sys.stderr)
self._thread = None
self._worker = None
self.callback_finished()
def _handle_worker(self):
""" Send commands and responses back from worker. """
assert '1' == self._worker.stdout.read(1).decode('utf-8')
for cmd, args in iter(self._queue.get, self.DONE):
self._last_cmd = cmd, args
self._send(cmd, args)
cmd, args = self._receive()
self._handle_response(cmd, args)
def _send(self, cmd, args):
""" Send a command to worker's stdin """
fd = self._worker.stdin
query = json.dumps((self.AUTH_CODE, cmd, args))
fd.write(query.encode('utf-8'))
fd.write(b'\n')
fd.flush()
def _receive(self):
""" Receive response from worker's stdout """
for line in iter(self._worker.stdout.readline, ''):
try:
key, cmd, args = json.loads(line.decode('utf-8'))
if key != self.AUTH_CODE:
raise ValueError('Got wrong auth code')
return cmd, args
except ValueError:
if self._worker.poll():
raise IOError("Worker died")
else:
continue # ignore invalid output from worker
else:
raise IOError("Can't read worker response")
def _handle_response(self, cmd, args):
""" Handle response from worker, call the callback """
if cmd == 'result':
key, docs = args
self.callback_query_result(key, docs)
elif cmd == 'error':
print(args)
else:
print("Unknown response:", cmd, args, file=sys.stderr)
def query(self, key, imports=None, make=None):
""" Request docstring extraction for a certain key """
if self._thread is None:
self.start()
if imports and make:
self._queue.put(('query', (key, imports, make)))
else:
self._queue.put(('query_key_only', (key,)))
def finish(self):
""" Signal end of requests """
self._queue.put(self.DONE)
def wait(self):
""" Wait for the handler thread to die """
if self._thread:
self._thread.join()
def terminate(self):
""" Terminate the worker and wait """
self._shutdown.set()
try:
self._worker.terminate()
self.wait()
except (OSError, AttributeError):
pass
###############################################################################
# Main worker entry point
###############################################################################
def worker_main():
"""
Main entry point for the docstring extraction process.
Manages RPC with main process through stdin/stdout.
Runs a docstring extraction for each key it read on stdin.
"""
def send(code, cmd, args):
json.dump((code, cmd, args), sys.stdout)
sys.stdout.write('\n')
        # flush so the main process reads the result and sends the next command
sys.stdout.flush()
sys.stdout.write('1')
# flush out to signal the main process we are ready for new commands
sys.stdout.flush()
for line in iter(sys.stdin.readline, ''):
code, cmd, args = json.loads(line)
try:
if cmd == 'query':
key, imports, make = args
send(code, 'result', (key, docstring_from_make(key, imports, make)))
elif cmd == 'query_key_only':
key, = args
send(code, 'result', (key, docstring_guess_from_key(key)))
elif cmd == 'exit':
break
except Exception as e:
send(code, 'error', repr(e))
if __name__ == '__worker__':
worker_main()
elif __name__ == '__main__':
def callback(key, docs):
print(key)
for match, doc in six.iteritems(docs):
print('-->', match)
print(str(doc).strip())
print()
print()
r = SubprocessLoader(callback)
# r.query('analog_feedforward_agc_cc')
# r.query('uhd_source')
r.query('expr_utils_graph')
r.query('blocks_add_cc')
r.query('blocks_add_cc', ['import gnuradio.blocks'], 'gnuradio.blocks.add_cc(')
# r.query('analog_feedforward_agc_cc')
# r.query('uhd_source')
# r.query('uhd_source')
# r.query('analog_feedforward_agc_cc')
r.finish()
# r.terminate()
r.wait()
| trabucayre/gnuradio | grc/core/utils/extract_docs.py | Python | gpl-3.0 | 9,513 |
#!/usr/bin/env python
from __future__ import division
from __future__ import print_function
# General libraries
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import sys
import argparse
import datetime
import getpass
import os
import time
# OpenCV
import cv2
# Precip Attractor libraries
import time_tools_attractor as ti
import io_tools_attractor as io
import data_tools_attractor as dt
import stat_tools_attractor as st
# optical flow libraries
import optical_flow as of
# advection libraries
import adv2d
print(adv2d.__doc__)
import maple_ree
print(maple_ree.__doc__)
####################################
###### RADAR EXTRAPOLATION IN PYTHON
####################################
######## Default parameters
noData = -999.0
timeAccumMin = 5
domainSize = [512,512] #512
resKm = 1
rainThreshold = 0.08
######## Folder paths
usrName = getpass.getuser()
usrName = "lforesti"
inBaseDir = '/scratch/' + usrName + '/data/' # directory to read from
outBaseDir = '/store/msrad/radar/precip_attractor_' + usrName + '/data/'
######## Parse arguments from command line
parser = argparse.ArgumentParser(description='')
parser.add_argument('-start', default='201505151600', type=str,help='Start date of forecast YYYYMMDDHHmmSS.')
parser.add_argument('-leadtime', default=60, type=int,help='')
parser.add_argument('-stack', default=15, type=int,help='')
parser.add_argument('-product', default='AQC', type=str,help='Which radar rainfall product to use (AQC, CPC, etc).')
parser.add_argument('-frameRate', default=0.5, type=float,help='')
parser.add_argument('-adv', default='maple', type=str,help='')
args = parser.parse_args()
advectionScheme = args.adv
frameRate = args.frameRate
product = args.product
leadtime = args.leadtime
timewindow = np.max((5,args.stack))
if (int(args.start) < 198001010000) or (int(args.start) > 203001010000):
    print('Invalid -start time argument.')
sys.exit(1)
else:
timeStartStr = args.start
######## Get dattime from timestamp
timeStart = ti.timestring2datetime(timeStartStr)
timeAccumMinStr = '%05i' % timeAccumMin
timeAccum24hStr = '%05i' % (24*60)
######## GIS stuff
# Limits of CCS4 domain
Xmin = 255000
Xmax = 965000
Ymin = -160000
Ymax = 480000
allXcoords = np.arange(Xmin,Xmax+resKm*1000,resKm*1000)
allYcoords = np.arange(Ymin,Ymax+resKm*1000,resKm*1000)
# Shapefile filename
fileNameShapefile = "/users/" + usrName + "/pyscripts/shapefiles/CHE_adm0.shp"
proj4stringWGS84 = "+proj=longlat +ellps=WGS84 +datum=WGS84"
proj4stringCH = "+proj=somerc +lat_0=46.95240555555556 +lon_0=7.439583333333333 \
+k_0=1 +x_0=600000 +y_0=200000 +ellps=bessel +towgs84=674.374,15.056,405.346,0,0,0,0 +units=m +no_defs"
######## Colormaps
color_list, clevs = dt.get_colorlist('MeteoSwiss') #'STEPS' or 'MeteoSwiss'
clevsStr = []
for i in range(0,len(clevs)):
if (clevs[i] < 10) and (clevs[i] >= 1):
clevsStr.append(str('%.1f' % clevs[i]))
elif (clevs[i] < 1):
clevsStr.append(str('%.2f' % clevs[i]))
else:
clevsStr.append(str('%i' % clevs[i]))
cmap = colors.ListedColormap(color_list)
norm = colors.BoundaryNorm(clevs, cmap.N)
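# A small self-contained illustration of the BoundaryNorm/ListedColormap pair
# built above: norm() maps a value to the index of the clevs bin it falls in,
# and imshow() then picks that entry of the color list. The boundaries below
# are toy values, not the MeteoSwiss levels.
_demo_cmap = colors.ListedColormap(['blue', 'green', 'red'])
_demo_norm = colors.BoundaryNorm([0.1, 1.0, 10.0, 100.0], _demo_cmap.N)
assert _demo_norm(5.0) == 1  # 5.0 mm/h falls in [1.0, 10.0) -> 'green'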
######## Loop over files to get two consecutive images
nrValidFields = 0
rainfallStack = np.zeros((2,domainSize[0],domainSize[1]))
nStacks = np.max((1,np.round(timewindow/timeAccumMin))).astype(int) + 1 # includes present obs
# number of leadtimes
net = np.round(leadtime/timeAccumMin).astype(int)
# leadtimes + number of observations
nt = net + nStacks
# initialise variables
zStack = []
tStack = []
xStack = []
yStack = []
uStack = []
vStack = []
tic = time.clock()
for i in range(nStacks-1,-1*net-1,-1):
######## Load radar images
timeLocal = timeStart - datetime.timedelta(seconds=i*60*timeAccumMin)
print(timeLocal)
year, yearStr, julianDay, julianDayStr = ti.parse_datetime(timeLocal)
hour = timeLocal.hour
minute = timeLocal.minute
# Create filename for input
hourminStr = ('%02i' % hour) + ('%02i' % minute)
radarOperWildCard = '?'
subDir = str(year) + '/' + yearStr + julianDayStr + '/'
inDir = inBaseDir + subDir
fileNameWildCard = inDir + product + yearStr + julianDayStr + hourminStr + radarOperWildCard + '_' + timeAccumMinStr + '*.gif'
# Get filename matching regular expression
fileName = io.get_filename_matching_regexpr(fileNameWildCard)
# Get data quality from fileName
dataQuality = io.get_quality_fromfilename(fileName)
# Check if file exists
isFile = os.path.isfile(fileName)
if (isFile == False):
print('File: ', fileNameWildCard, ' not found.')
else:
# Reading GIF file
try:
# Open GIF image
rain8bit, nrRows, nrCols = io.open_gif_image(fileName)
# Get GIF image metadata
alb, doe, mle, ppm, wei = io.get_gif_radar_operation(fileName)
# Generate lookup table
lut = dt.get_rainfall_lookuptable(noData)
# Replace 8bit values with rain rates
rainrate = lut[rain8bit]
            if product == 'AQC': # AQC is given in millimeters!!!
rainrate[rainrate != noData] = rainrate[rainrate != noData]*(60/5)
# Get coordinates of reduced domain
extent = dt.get_reduced_extent(rainrate.shape[1], rainrate.shape[0], domainSize[1], domainSize[0])
Xmin = allXcoords[extent[0]]
Ymin = allYcoords[extent[1]]
Xmax = allXcoords[extent[2]]
Ymax = allYcoords[extent[3]]
subXcoords = np.arange(Xmin,Xmax,resKm*1000)
subYcoords = np.arange(Ymin,Ymax,resKm*1000)
# Select 512x512 domain in the middle
rainrate = dt.extract_middle_domain(rainrate, domainSize[1], domainSize[0])
rain8bit = dt.extract_middle_domain(rain8bit, domainSize[1], domainSize[0])
# rainrate = rainrate[150:350,50:250]
# rain8bit = rain8bit[150:350,50:250]
# Create mask radar composite
mask = np.ones(rainrate.shape)
mask[rainrate != noData] = np.nan
mask[rainrate == noData] = 1
# Compute WAR
war = st.compute_war(rainrate,rainThreshold, noData)
except IOError:
print('File ', fileName, ' not readable')
war = -1
if (war >= 0.01 or i < 0):
# -999 to nan
rainrate[rainrate < 0] = np.nan
rainratePlot = np.copy(rainrate)
# Set lowest rain thresholds
rainThreshold = 0.08
condition = rainrate < rainThreshold
rainrate[condition] = rainThreshold
# Compute corresponding reflectivity
A = 316.0
b = 1.5
dBZ,mindBZ,_ = dt.rainrate2reflectivity(rainrate,A,b)
dBZ[condition] = 0
dBZ[dBZ==-999] = 0
rainfieldZeros = dBZ.copy()
# nan with zeros
rainfieldZeros[np.isnan(rainfieldZeros)] = 0
# remove small noise with a morphological operator (opening)
rainfieldZeros = of.morphological_opening(rainfieldZeros, thr=rainThreshold, n=5)
# scale values between 0 and 255
rainfieldZeros *= 255.0/rainfieldZeros.max()
# Move rainfall field down the stack
nrValidFields = nrValidFields + 1
rainfallStack[1,:,:] = rainfallStack[0,:]
rainfallStack[0,:,:] = rainfieldZeros
# Stack image for plotting
zStack.append(rainratePlot)
tStack.append(timeLocal)
########### Compute optical flow on these two images
if (nrValidFields >= 2 and i >= 0):
# extract consecutive images
prvs = rainfallStack[1,:,:]
next = rainfallStack[0,:,:]
# 8-bit int
prvs = np.ndarray.astype(prvs,'uint8')
next = np.ndarray.astype(next,'uint8')
# (1a) features to track by threshold (cells)
# maxCorners = 200
# p0, nCorners = of.threshold_features_to_track(prvs, maxCorners, minThr = rainThreshold, blockSize = 35)
# (1b) Shi-Tomasi good features to track
maxCorners = 1000
p0, nCorners = of.ShiTomasi_features_to_track(prvs, maxCorners, qualityLevel=0.05, minDistance=15, blockSize=5)
# use both
# p0 = np.vstack((p0a,p0b))
        # (2) Lucas-Kanade tracking
x, y, u, v, err = of.LucasKanade_features_tracking(prvs, next, p0, winSize=(50,50), maxLevel=3)
# (3) exclude some unrealistic vectors
maxspeed = 100/12 # km/5min
speed = np.sqrt(u**2 + v**2)
keep = speed < maxspeed
u = u[keep].reshape(np.sum(keep),1)
v = v[keep].reshape(np.sum(keep),1)
x = x[keep].reshape(np.sum(keep),1)
y = y[keep].reshape(np.sum(keep),1)
# (4) stack vectors within time window
xStack.append(x)
yStack.append(y)
uStack.append(u)
vStack.append(v)
########### Interpolate the stacked sparse motion vectors onto a regular grid
# convert lists of arrays into single arrays
x = np.vstack(xStack)
y = np.vstack(yStack)
u = np.vstack(uStack)
v = np.vstack(vStack)
t = np.hstack(tStack)
zplot = np.dstack(zStack)
# use rollaxis to get the shape to be tx512x512:
zplot = np.rollaxis(zplot,-1)
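# Shape bookkeeping for the stacking above (toy check): np.dstack turns a list
# of t fields of shape (ny, nx) into (ny, nx, t), and np.rollaxis(..., -1)
# brings the time axis to the front so zplot[it,:,:] selects one frame.
assert np.rollaxis(np.dstack([np.zeros((4, 3)), np.ones((4, 3))]), -1).shape == (2, 4, 3)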
# (1) decluster sparse motion vectors
if (nStacks > 1):
x, y, u, v = of.declustering(x, y, u, v, R = 20, minN = 3)
# (2) kernel interpolation
xgrid, ygrid, U, V = of.interpolate_sparse_vectors_kernel(x, y, u, v, \
domainSize, b = [])
# or linear interpolation
# xgrid, ygrid, U, V = of.interpolate_sparse_vectors_linear(x, y, u, v, \
# domainSize)
toc = time.clock()
print('OF time: ',str(toc-tic),' seconds.')
# resize vector fields for plotting
xs, ys, Us, Vs = of.reduce_field_density_for_plotting(xgrid, ygrid, U, V, 25)
########### Advect most recent rainrate field using the computed optical flow
# MAPLE advection scheme
if (advectionScheme=='maple'):
# MAPLE computes all leadtimes in one run
print("Running",str(net),"leadtimes with MAPLE's advection scheme ...")
# resize motion fields by factor f
f = 0.5
if (f<1):
Ures = cv2.resize(U, (0,0), fx=f, fy=f)
Vres = cv2.resize(V, (0,0), fx=f, fy=f)
else:
Ures = U
Vres = V
# extract last radar image to advect
z = zplot[nStacks-1,:,:]
z[np.isnan(z)]=0
# call routine
tic = time.clock()
zmaple = maple_ree.ree_epol_slio(z, Vres, Ures, net)
toc = time.clock()
print('AD time: ',str((toc-tic)/net),' seconds per time-step.')
# Michael's advection scheme
if (advectionScheme=='ethz'):
print("Running",str(net),"leadtimes with Michael's advection scheme ...")
# extract last radar image to advect
z = zplot[nStacks-1,:,:]
z[np.isnan(z)]=0
# U,V per minute
Ured = U/timeAccumMin
Vred = V/timeAccumMin
tic = time.clock()
# loop for n leadtimes
ztmp = np.zeros((domainSize[0],domainSize[1],net*timeAccumMin+1))
ztmp[:,:,0] = z
for it in range(net*timeAccumMin):
ztmp[:,:,it+1] = adv2d.advxy(Vred,Ured,ztmp[:,:,it],0)
ztmp[ztmp<0.001]=0
zethz = ztmp[:,:,range(1,(net*timeAccumMin)+1,timeAccumMin)]
toc = time.clock()
print('AD time: ',str((toc-tic)/net),' seconds per time-step.')
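# A rough pure-Python illustration of one backward semi-Lagrangian advection
# step, in the spirit of the two compiled schemes above (maple_ree / adv2d are
# Fortran routines; this sketch is not their actual algorithm). It assumes
# U, V are displacements in pixels per time step and that scipy is available.
def advect_one_step(z, U, V):
    from scipy.ndimage import map_coordinates  # extra dependency, assumed installed
    ny, nx = z.shape
    yy, xx = np.meshgrid(np.arange(ny), np.arange(nx), indexing='ij')
    # sample the field at the upstream departure points (arrival minus displacement)
    return map_coordinates(z, [yy - V, xx - U], order=1, mode='nearest')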
# extract edges from observations
edgedStack = []
for it in range(zplot.shape[0]):
zobs = zplot[it,:,:]
zobs[np.isnan(zobs)] = 0.0
zobs = np.ndarray.astype(zobs>rainThreshold,'uint8')
# print(np.unique(zobs))
# zobs = cv2.bilateralFilter(zobs, 5, 17, 17)
edged = cv2.Canny(zobs, rainThreshold, rainThreshold)
edged = np.array(edged)
edged = edged.astype(float)
edged[edged<=0] = np.nan
edgedStack.append(edged)
# animation
try:
while True:
for it in range(nt):
plt.clf()
if (it <= nStacks-1): # observation mode
timeLocal = t[it]
z = zplot[it,:,:]
z[z<=0]=np.nan
titleStr = timeLocal.strftime("%Y.%m.%d %H:%M") + ', ' + product + ' rainfall field'
if (it>0):
x = xStack[it-1]
y = yStack[it-1]
u = uStack[it-1]
v = vStack[it-1]
plt.quiver(x,y,u,v,angles = 'xy', scale_units='xy', color='darkred')
else: # extrapolation mode
if (advectionScheme=='maple'):
z = np.squeeze(zmaple[:,:,it - nStacks])
elif (advectionScheme=='ethz'):
z = np.squeeze(zethz[:,:,it - nStacks])
z[z<=0]=np.nan
titleStr = timeLocal.strftime("%Y.%m.%d %H:%M") + ' + ' + str((it-nStacks+1)*5) + ' min, ' + product + ' rainfall field'
rainIm = plt.imshow(z, cmap=cmap, norm=norm, interpolation='nearest')
cbar = plt.colorbar(rainIm, ticks=clevs, spacing='uniform', norm=norm, extend='max', fraction=0.03)
cbar.set_ticklabels(clevsStr, update_ticks=True)
cbar.set_label("mm/hr")
if (it > nStacks-1):
plt.quiver(xs,ys,Us,Vs,angles = 'xy', scale_units='xy')
plt.imshow(edgedStack[it],cmap='Greys_r',interpolation='nearest')
plt.grid()
plt.title(titleStr)
saveplots=1
if saveplots:
figname = "tmp/frame_" + str(it).zfill(2) + ".png"
plt.savefig(figname)
print(figname + ' saved.')
if it == range(nt)[-1]:
print('Generating the animation...')
delay = 50
dpi = 100
outDir = 'tmp/'
stringGifName = timeStartStr + '_' + str(leadtime) + 'min.gif'
cmd = 'convert -delay ' + str(delay) + ' -loop 0 ' + outDir + '/*.png ' + stringGifName
os.system(cmd)
print(stringGifName, ' saved.')
sys.exit()
else:
plt.pause(frameRate)
except KeyboardInterrupt:
pass | meteoswiss-mdr/precipattractor | pyscripts/radar_extrapolation.py | Python | gpl-3.0 | 15,346 |
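# The Z-R conversion used in radar_extrapolation.py above follows
# Z = A * R**b with A = 316, b = 1.5 (a Marshall-Palmer-type relation), i.e.
# dBZ = 10 * log10(A * R**b). A stand-alone version, assuming this is what
# dt.rainrate2reflectivity() computes internally:
import numpy as np
def rainrate_to_dbz(rainrate_mmh, A=316.0, b=1.5):
    """Convert rain rate [mm/h] to reflectivity [dBZ] via Z = A * R**b."""
    return 10.0 * np.log10(A * rainrate_mmh ** b)
# e.g. rainrate_to_dbz(1.0) == 10*log10(316) ~ 25.0 dBZ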
# -*- coding: utf-8 -*-
import os, Image
from datetime import datetime
from django.template import Library
from django.utils.timesince import timesince
from oi.settings import MEDIA_ROOT, MEDIA_URL
from oi.forum.postmarkup import render_bbcode
register = Library()
@register.filter
def thumbnail(file, size='200x200'):
# defining the size
x, y = [int(x) for x in size.split('x')]
# defining the filename and the miniature filename
basename, format = file.rsplit('.', 1)
miniature = basename + '_' + size + '.' + format
miniature_filename = os.path.join(MEDIA_ROOT, miniature)
miniature_url = os.path.join(MEDIA_URL, miniature)
filename = os.path.join(MEDIA_ROOT, file)
if os.path.exists(filename):
# if image has been modified, remove old thumbnail
if os.path.exists(miniature_filename) and os.path.getmtime(filename)>os.path.getmtime(miniature_filename):
os.unlink(miniature_filename)
# if the image wasn't already resized, resize it
if not os.path.exists(miniature_filename):
#print '>>> debug: resizing the image to the format %s!' % size
image = Image.open(filename)
image.thumbnail([x, y]) # generate a 200x200 thumbnail
image.save(miniature_filename, image.format)
return miniature_url
else:
return file
@register.filter
def renderbbcode(context):
return render_bbcode(context)
@register.inclusion_tag('paginator.html', takes_context=True)
def paginator(context, adjacent_pages=4):
"""
To be used in conjunction with the object_list generic view.
Adds pagination context variables for use in displaying first, adjacent and
last page links in addition to those created by the object_list generic
view.
"""
page_numbers = [n for n in \
range(context['page'] - adjacent_pages, context['page'] + adjacent_pages + 1) \
if n > 0 and n <= context['pages']]
return {
'hits': context['hits'],
'results_per_page': context['results_per_page'],
'page': context['page'],
'pages': context['pages'],
'page_numbers': page_numbers,
'next': context['next'],
'previous': context['previous'],
'has_next': context['has_next'],
'has_previous': context['has_previous'],
'show_first': 1 not in page_numbers,
'show_last': context['pages'] not in page_numbers,
}
@register.inclusion_tag('paginator_for_tema.html', takes_context=True)
def paginator_for_tema(context, adjacent_pages=4):
"""
To be used in conjunction with the object_list generic view.
Adds pagination context variables for use in displaying first, adjacent and
last page links in addition to those created by the object_list generic
view.
"""
page_numbers = [n for n in \
range(context['page'] - adjacent_pages, context['page'] + adjacent_pages + 1) \
if n > 0 and n <= context['pages']]
return {
'hits': context['hits'],
'results_per_page': context['results_per_page'],
'page': context['page'],
'pages': context['pages'],
'page_numbers': page_numbers,
'next': context['next'],
'previous': context['previous'],
'has_next': context['has_next'],
'has_previous': context['has_previous'],
'show_first': 1 not in page_numbers,
'show_last': context['pages'] not in page_numbers,
'order': context['order']
}
@register.filter
def timedelta(value, arg=None):
if not value:
return ''
if arg:
cmp = arg
else:
cmp = datetime.now()
if value > cmp:
return "%s sonra" % timesince(cmp,value)
else:
return "%s önce" % timesince(value,cmp)
@register.filter
def rfc822datetime(value):
if value:
rfc822months = ("Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
return "%s %s %d %s:%s:%s +0200" % (str(value.day).zfill(2), rfc822months[value.month-1], value.year, str(value.hour).zfill(2), str(value.minute).zfill(2), str(value.second).zfill(2))
else:
return ""
| MehmetNuri/ozgurlukicin | st/templatetags/library.py | Python | gpl-3.0 | 4,221 |
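# Doctest-style checks for two of the filters above (inputs assumed for
# illustration). thumbnail() derives the miniature name by suffixing the size:
#   'gallery/photo.jpg' resized to '100x100' -> 'gallery/photo_100x100.jpg'
# and rfc822datetime() zero-pads each field with a hard-coded +0200 offset:
#   >>> rfc822datetime(datetime(2011, 5, 3, 14, 7, 9))
#   '03 May 2011 14:07:09 +0200'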
# import yaml
# import json
# import sql
import psycopg2
# import datetime
# import pprint
# # connect to database makerspace
# conn = psycopg2.connect("host=localhost dbname=makerspace user=postgres")
# # open a cursor to perform database operations
# cur = conn.cursor()
# sdb = StudentDatabase( "host", "db_name", "user", "pass" )
# sdb.add_user( "cardID", "ID", "netID", "Name", "Lastname", "oldid" )
class StudentDatabase:
def __init__(self, host, db_name, user, password, port=5432):
# TODO: add password field to connection after successful database connection
self.host = host
self.db_name = db_name
self.user = user
self.password = password
self.port = port
try:
# TODO: add password field
# connect to database makerspace
self.conn = psycopg2.connect("host={} dbname={} user={} port={}".format(self.host, self.db_name, self.user, self.port))
# open a cursor to perform database operations
except:
print("ERROR: I am unable to connect to the database. \n Please supply parameters in this format: StudentDatabase.StudentDatabase(self, host, db_name, user, password)")
def __dbReq(self, stringReq):
"""
Handles creating a cursor, making a request, and closing said cursor.
"""
# TODO: Add exception handling, do some logging
try:
cur = self.conn.cursor()
cur.execute(stringReq)
try:
result = cur.fetchall() # if not a fetch operation, this will fail.
print("results fetched!")
except:
result = None
pass
self.conn.commit()
print("Request made!")
cur.close()
return result
except Exception as e:
print(e)
def get_init(self):
return "Parameters for database connection are: host={} db_name={} password={} user={} port={}".format(self.host, self.db_name, self.password, self.user, self.port)
def user_exists(self, card_id, uw_id, uw_netid):
user_exists = self.__dbReq('SELECT * FROM users WHERE card_id=\'{}\' OR uw_id=\'{}\' OR uw_netid=\'{}\''.format(str(card_id), str(uw_id), str(uw_netid)))
print(user_exists)
return bool(user_exists) # if true then user exists
def add_user(self, card_id, uw_id, uw_netid, first_name, last_name):
# searches table for existing users with any matching unique inputs, i.e. duplicates
try:
if not self.user_exists(card_id, uw_id, uw_netid): # user does not exist
print("didn't find user in database! now inserting user into database")
request = "INSERT INTO users (card_id, uw_id, uw_netid, first_name, last_name) VALUES(\'{}\', \'{}\', \'{}\', \'{}\', \'{}\')".format(str(card_id), str(uw_id), str(uw_netid), str(first_name), str(last_name))
self.__dbReq(request)
return True
else:
print("user already exists!")
return False
except Exception as e:
print(e)
# removing user from form input
def remove_user(self, card_id, uw_id, uw_netid):
# if a user is found, remove them from the users table
if self.user_exists(card_id, uw_id, uw_netid): # user exists
# TODO: move deleted user to new table?
print("found user in database! now deleting user from database...")
# TODO: decide if we need this to be an OR or an AND
self.__dbReq("DELETE FROM users WHERE card_id=\'{}\' OR uw_id=\'{}\' OR uw_netid=\'{}\'".format(str(card_id), str(uw_id), str(uw_netid)))
return True
# error, no user found matching inputs
else:
print("didn\'t find user in database!")
return False
# editing user entry by form input
def edit_user(self, id, card_id, uw_id, uw_netid, first_name, last_name):
# if id is found update user entry exactly
if self.user_exists(card_id, uw_id, uw_netid): # user exists
self.__dbReq("UPDATE users SET card_id=\'{}\', uw_id=\'{}\', uw_netid=\'{}\', first_name=\'{}\', last_name=\'{}\' WHERE id=\'{}\'".format(str(card_id), str(uw_id), str(uw_netid), str(first_name), str(last_name), str(id)))
return True
# error, no id found so no update
else:
print("error, no id found so no update")
return False
def display_all_users(self):
return self.__dbReq("SELECT * FROM users;")
def display_user_no_html(self, id=None, card_id=None, uw_id=None, uw_netid=None, first_name=None, last_name=None):
pass
# display all users
# TODO: is this currently necessary? What will it provide us?
    def display_users(self):
        table = "<table>"
        header = self.__dbReq("SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME='users'")
        for column in header:
            table += "<th>" + str(column[3]) + "</th>"
        data = self.__dbReq("SELECT * FROM users")
        for row in data:
            table += "<tr>"
            for column in row:
                table += "<td>" + str(column) + "</td>"
            table += "</tr>"
        return table + "</table>"
def membership_exists(self, uw_id, type):
# TODO: decide if this needs to have the TYPE column attached to it, so that we can verify info for what membership has access to.
membership_exists = self.__dbReq("SELECT * FROM memberships WHERE uw_id=\'{}\' AND type=\'{}\'".format(str(uw_id), str(type)))
print(membership_exists)
return bool(membership_exists) # if true then membership exists
# add membership to uw_id given card_id and type of membership
# expiration_date is only required if it is a main_door membership
def add_membership(self, uw_id, type, join_date, expiration_date):
# searches table for existing memberships with any matching unique inputs, i.e. duplicates
try:
if not self.membership_exists(uw_id, type): # membership does not exist
print("didn't find membership in database! now inserting membership into database")
print(join_date)
request = "INSERT INTO memberships (uw_id, type, join_date, expiration_date) VALUES(\'{}\', \'{}\', \'{}\', \'{}\')".format(str(uw_id), str(type), str(join_date), str(expiration_date))
self.__dbReq(request)
return True
else:
print("membership already exists!")
return False
except Exception as e:
print(e)
# display all memberships and allow removing one by selecting one
def remove_membership(self, uw_id, type, join_date, expiration_date):
try:
            if self.membership_exists(uw_id, type): # membership exists
print("found membership in database! now removing membership from database")
print(join_date)
# these logical operators probably need to be changed.
request = "DELETE FROM memberships WHERE uw_id=\'{}\' AND type=\'{}\' OR join_date=\'{}\' OR expiration_date=\'{}\'".format(str(uw_id), str(type), str(join_date), str(expiration_date))
self.__dbReq(request)
return True
else:
print("membership doesn't exists... error")
return False
except Exception as e:
print(e)
# edit details of a membership
def edit_membership(self, id, uw_id, type, join_date, expiration_date):
# if id is found update user entry exactly
if self.membership_exists(uw_id, type): # user exists
request = "UPDATE users SET uw_id=\'{}\', type=\'{}\', join_date=\'{}\', expiration_date=\'{}\' WHERE id=\'{}\'".format(str(uw_id), str(type), str(join_date), str(expiration_date))
self.__dbReq(request)
return True
# error, no id found so no update
else:
print("error, no id found so no update")
return False
# ban membership of uw_id given card_id and type of membership
# start_date is from time of form submission and end_date set by submitter
def ban_card(self, uw_id, type, start_date, end_date):
pass
# # display list of all bans and allow unbanning by selecting one
# def unban_card(self, card_id, type, start_date, end_date):
# pass
#
# def add_card_reader(self):
# pass
#
# def edit_card_reader(self):
# pass
#
# def remove_card_reader(self):
# pass
#
# def add_equipment_groups(self):
# pass
#
# def edit_equipment_groups(self):
# pass
#
# def remove_equipment_groups(self):
# pass
#
# # writes to table card_activity
# def write_card_activity(self, uw_id, type, date):
# # def write_card_activity(uw_id, type, date, pass='0'): doesn't work for Cody & Chuan
# pass
#
# # optional: show which equipment types a user is trained on
# def show_trained_equipment(uw_id):
# pass
#
# # optional: show all users trained on an equipment type
# def show_trained_users(type):
# pass
#
#
# class Members():
# def __init__ (self, card_id, type, expiration_date, start_date, end_date, date):
# self.card_id = card_id
# self.type = type
# self.expiration_date = expiration_date
# self.start_date = start_date
# self.end_date = end_date
# self.date = date
#
#
# # checks card number for bans then for membership then if membership is expired
# def card_swipe(card_id, card_reader):
# # given card_reader id get equipment type from card_readers table
# self.__dbReq("SELECT type FROM card_readers WHERE id=%(card_reader)s", {'card_reader': card_reader})
# if cur.rowcount > 0:
# type = cur.fetchall()[0][0]
# # given user's card_id get user's uw_id from users table
# self.__dbReq("SELECT uw_id FROM users WHERE card_id=%(card_id)s", {'card_id': card_id})
# if cur.rowcount > 0:
# uw_id = cur.fetchall()[0][0]
# # search memberships table for uw_id and equipment type and if found return expiration_date
# self.__dbReq("SELECT expiration_date FROM memberships WHERE uw_id=%(uw_id)s AND type=%(type)s ORDER BY expiration_date DESC", {'uw_id': uw_id, 'type': type})
# if cur.rowcount > 0:
# expiration_date = cur.fetchall()[0][0]
# if expiration_date > time.time():
# return 'GJ YOU IN'
# # call write_card_activity()
# else:
# return 'U FAILED'
    def test_users_table(self, id, card_id, uw_id, uw_netid, first_name, last_name):
        print("****USER EXISTS?****")
        print(self.user_exists(card_id, uw_id, uw_netid)) # check if user exists
        print("****ADDING USER****")
        self.add_user(card_id, uw_id, uw_netid, first_name, last_name)
        print("****EDITING USER****")
        self.edit_user(id, card_id, uw_id, uw_netid, first_name, last_name) # when edited, we can see that their entry goes to the end of the list
        print("****DISPLAYING ALL USERS****")
        self.display_all_users()
        print("****REMOVING USER****")
        self.remove_user(card_id, uw_id, uw_netid)
    def test_memberships_table(self, id, uw_id, type, join_date, expiration_date):
        print("****MEMBERSHIP EXISTS?****")
        print(self.membership_exists(uw_id, type)) # check if membership exists
        print("****ADDING MEMBERSHIP****")
        self.add_membership(uw_id, type, join_date, expiration_date)
        # self.remove_membership(uw_id, type, join_date, expiration_date)
        print("****EDITING MEMBERSHIP****")
        self.edit_membership(id, uw_id, type, join_date, expiration_date)
# print("****REMOVING MEMBERSHIP****")
# student.remove_user(, "1234512341", "1234512341")
# pprint.pprint(student.display_all_users())
student = StudentDatabase("localhost", "postgres", "postgres", "1234")
# Testing for test_users_table
print("****INITIALIZATION****")
print(student.get_init())
# must update the id argument below on every run, since each insert advances the id sequence.
student.test_users_table("2", "16808469", "16808469", "chunter6", "CHRISTOPHER J.", "HUNTER")
# Testing for test_memberships_table
#join_date = datetime.date.today().strftime("%Y%m%d") # changed to this to test, but date can be anything
#student.test_memberships_table("34" ,"16808635", "main_door", "1494610320", "1494610320")
| aradler/Card-lockout | server/StudentDatabase.py | Python | mit | 12,892 |
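# The string-formatted SQL in StudentDatabase above is open to SQL injection;
# a safer variant of user_exists() using psycopg2 parameter binding could look
# like this (sketch; same table and columns assumed):
def user_exists_safe(conn, card_id, uw_id, uw_netid):
    """Return True if any user matches, letting the driver quote the values."""
    cur = conn.cursor()
    cur.execute(
        "SELECT 1 FROM users WHERE card_id = %s OR uw_id = %s OR uw_netid = %s",
        (str(card_id), str(uw_id), str(uw_netid)))
    found = cur.fetchone() is not None
    cur.close()
    return found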
# Copyright 2014 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import grpc
import mock
import requests
from six.moves import http_client
from google.api_core import exceptions
def test_create_google_cloud_error():
exception = exceptions.GoogleAPICallError('Testing')
exception.code = 600
assert str(exception) == '600 Testing'
assert exception.message == 'Testing'
assert exception.errors == []
assert exception.response is None
def test_create_google_cloud_error_with_args():
error = {
'domain': 'global',
'location': 'test',
'locationType': 'testing',
'message': 'Testing',
'reason': 'test',
}
response = mock.sentinel.response
exception = exceptions.GoogleAPICallError(
'Testing', [error], response=response)
exception.code = 600
assert str(exception) == '600 Testing'
assert exception.message == 'Testing'
assert exception.errors == [error]
assert exception.response == response
def test_from_http_status():
message = 'message'
exception = exceptions.from_http_status(http_client.NOT_FOUND, message)
assert exception.code == http_client.NOT_FOUND
assert exception.message == message
assert exception.errors == []
def test_from_http_status_with_errors_and_response():
message = 'message'
errors = ['1', '2']
response = mock.sentinel.response
exception = exceptions.from_http_status(
http_client.NOT_FOUND, message, errors=errors, response=response)
assert isinstance(exception, exceptions.NotFound)
assert exception.code == http_client.NOT_FOUND
assert exception.message == message
assert exception.errors == errors
assert exception.response == response
def test_from_http_status_unknown_code():
message = 'message'
status_code = 156
exception = exceptions.from_http_status(status_code, message)
assert exception.code == status_code
assert exception.message == message
def make_response(content):
response = requests.Response()
response._content = content
response.status_code = http_client.NOT_FOUND
response.request = requests.Request(
method='POST', url='https://example.com').prepare()
return response
def test_from_http_response_no_content():
response = make_response(None)
exception = exceptions.from_http_response(response)
assert isinstance(exception, exceptions.NotFound)
assert exception.code == http_client.NOT_FOUND
assert exception.message == 'POST https://example.com/: unknown error'
assert exception.response == response
def test_from_http_response_text_content():
response = make_response(b'message')
exception = exceptions.from_http_response(response)
assert isinstance(exception, exceptions.NotFound)
assert exception.code == http_client.NOT_FOUND
assert exception.message == 'POST https://example.com/: message'
def test_from_http_response_json_content():
response = make_response(json.dumps({
'error': {
'message': 'json message',
'errors': ['1', '2']
}
}).encode('utf-8'))
exception = exceptions.from_http_response(response)
assert isinstance(exception, exceptions.NotFound)
assert exception.code == http_client.NOT_FOUND
assert exception.message == 'POST https://example.com/: json message'
assert exception.errors == ['1', '2']
def test_from_http_response_bad_json_content():
response = make_response(json.dumps({'meep': 'moop'}).encode('utf-8'))
exception = exceptions.from_http_response(response)
assert isinstance(exception, exceptions.NotFound)
assert exception.code == http_client.NOT_FOUND
assert exception.message == 'POST https://example.com/: unknown error'
def test_from_http_response_json_unicode_content():
response = make_response(json.dumps({
'error': {
'message': u'\u2019 message',
'errors': ['1', '2']
}
}).encode('utf-8'))
exception = exceptions.from_http_response(response)
assert isinstance(exception, exceptions.NotFound)
assert exception.code == http_client.NOT_FOUND
assert exception.message == u'POST https://example.com/: \u2019 message'
assert exception.errors == ['1', '2']
def test_from_grpc_status():
message = 'message'
exception = exceptions.from_grpc_status(
grpc.StatusCode.OUT_OF_RANGE, message)
assert isinstance(exception, exceptions.BadRequest)
assert isinstance(exception, exceptions.OutOfRange)
assert exception.code == http_client.BAD_REQUEST
assert exception.grpc_status_code == grpc.StatusCode.OUT_OF_RANGE
assert exception.message == message
assert exception.errors == []
def test_from_grpc_status_with_errors_and_response():
message = 'message'
response = mock.sentinel.response
errors = ['1', '2']
exception = exceptions.from_grpc_status(
grpc.StatusCode.OUT_OF_RANGE, message,
errors=errors, response=response)
assert isinstance(exception, exceptions.OutOfRange)
assert exception.message == message
assert exception.errors == errors
assert exception.response == response
def test_from_grpc_status_unknown_code():
message = 'message'
exception = exceptions.from_grpc_status(
grpc.StatusCode.OK, message)
assert exception.grpc_status_code == grpc.StatusCode.OK
assert exception.message == message
def test_from_grpc_error():
message = 'message'
error = mock.create_autospec(grpc.Call, instance=True)
error.code.return_value = grpc.StatusCode.INVALID_ARGUMENT
error.details.return_value = message
exception = exceptions.from_grpc_error(error)
assert isinstance(exception, exceptions.BadRequest)
assert isinstance(exception, exceptions.InvalidArgument)
assert exception.code == http_client.BAD_REQUEST
assert exception.grpc_status_code == grpc.StatusCode.INVALID_ARGUMENT
assert exception.message == message
assert exception.errors == [error]
assert exception.response == error
def test_from_grpc_error_non_call():
message = 'message'
error = mock.create_autospec(grpc.RpcError, instance=True)
error.__str__.return_value = message
exception = exceptions.from_grpc_error(error)
assert isinstance(exception, exceptions.GoogleAPICallError)
assert exception.code is None
assert exception.grpc_status_code is None
assert exception.message == message
assert exception.errors == [error]
assert exception.response == error
| jonparrott/gcloud-python | api_core/tests/unit/test_exceptions.py | Python | apache-2.0 | 7,078 |
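# A sketch of how client code typically consumes the mapping exercised above:
# transports convert errors with from_http_response()/from_grpc_error(), and
# callers catch the rich exception classes instead of inspecting status codes.
# The client object and its get() method are hypothetical.
from google.api_core import exceptions
def fetch_resource(client, name):
    try:
        return client.get(name)
    except exceptions.NotFound:
        return None  # HTTP 404 and grpc NOT_FOUND both map here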
"""Runtime entry data for ESPHome stored in hass.data."""
import asyncio
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple
from aioesphomeapi import (
COMPONENT_TYPE_TO_INFO,
BinarySensorInfo,
CameraInfo,
ClimateInfo,
CoverInfo,
DeviceInfo,
EntityInfo,
EntityState,
FanInfo,
LightInfo,
SensorInfo,
SwitchInfo,
TextSensorInfo,
UserService,
)
import attr
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.storage import Store
from homeassistant.helpers.typing import HomeAssistantType
if TYPE_CHECKING:
from . import APIClient
DATA_KEY = "esphome"
SAVE_DELAY = 120
# Mapping from ESPHome info type to HA platform
INFO_TYPE_TO_PLATFORM = {
BinarySensorInfo: "binary_sensor",
CameraInfo: "camera",
ClimateInfo: "climate",
CoverInfo: "cover",
FanInfo: "fan",
LightInfo: "light",
SensorInfo: "sensor",
SwitchInfo: "switch",
TextSensorInfo: "sensor",
}
@attr.s
class RuntimeEntryData:
"""Store runtime data for esphome config entries."""
entry_id: str = attr.ib()
client: "APIClient" = attr.ib()
store: Store = attr.ib()
reconnect_task: Optional[asyncio.Task] = attr.ib(default=None)
state: Dict[str, Dict[str, Any]] = attr.ib(factory=dict)
info: Dict[str, Dict[str, Any]] = attr.ib(factory=dict)
# A second list of EntityInfo objects
# This is necessary for when an entity is being removed. HA requires
# some static info to be accessible during removal (unique_id, maybe others)
# If an entity can't find anything in the info array, it will look for info here.
old_info: Dict[str, Dict[str, Any]] = attr.ib(factory=dict)
services: Dict[int, "UserService"] = attr.ib(factory=dict)
available: bool = attr.ib(default=False)
device_info: Optional[DeviceInfo] = attr.ib(default=None)
cleanup_callbacks: List[Callable[[], None]] = attr.ib(factory=list)
disconnect_callbacks: List[Callable[[], None]] = attr.ib(factory=list)
loaded_platforms: Set[str] = attr.ib(factory=set)
platform_load_lock: asyncio.Lock = attr.ib(factory=asyncio.Lock)
@callback
def async_update_entity(
self, hass: HomeAssistantType, component_key: str, key: int
) -> None:
"""Schedule the update of an entity."""
signal = f"esphome_{self.entry_id}_update_{component_key}_{key}"
async_dispatcher_send(hass, signal)
@callback
def async_remove_entity(
self, hass: HomeAssistantType, component_key: str, key: int
) -> None:
"""Schedule the removal of an entity."""
signal = f"esphome_{self.entry_id}_remove_{component_key}_{key}"
async_dispatcher_send(hass, signal)
async def _ensure_platforms_loaded(
self, hass: HomeAssistantType, entry: ConfigEntry, platforms: Set[str]
):
async with self.platform_load_lock:
needed = platforms - self.loaded_platforms
tasks = []
for platform in needed:
tasks.append(
hass.config_entries.async_forward_entry_setup(entry, platform)
)
if tasks:
await asyncio.wait(tasks)
self.loaded_platforms |= needed
async def async_update_static_infos(
self, hass: HomeAssistantType, entry: ConfigEntry, infos: List[EntityInfo]
) -> None:
"""Distribute an update of static infos to all platforms."""
# First, load all platforms
needed_platforms = set()
for info in infos:
for info_type, platform in INFO_TYPE_TO_PLATFORM.items():
if isinstance(info, info_type):
needed_platforms.add(platform)
break
await self._ensure_platforms_loaded(hass, entry, needed_platforms)
# Then send dispatcher event
signal = f"esphome_{self.entry_id}_on_list"
async_dispatcher_send(hass, signal, infos)
@callback
def async_update_state(self, hass: HomeAssistantType, state: EntityState) -> None:
"""Distribute an update of state information to all platforms."""
signal = f"esphome_{self.entry_id}_on_state"
async_dispatcher_send(hass, signal, state)
@callback
def async_update_device_state(self, hass: HomeAssistantType) -> None:
"""Distribute an update of a core device state like availability."""
signal = f"esphome_{self.entry_id}_on_device_update"
async_dispatcher_send(hass, signal)
async def async_load_from_store(self) -> Tuple[List[EntityInfo], List[UserService]]:
"""Load the retained data from store and return de-serialized data."""
restored = await self.store.async_load()
if restored is None:
return [], []
self.device_info = _attr_obj_from_dict(
DeviceInfo, **restored.pop("device_info")
)
infos = []
for comp_type, restored_infos in restored.items():
if comp_type not in COMPONENT_TYPE_TO_INFO:
continue
for info in restored_infos:
cls = COMPONENT_TYPE_TO_INFO[comp_type]
infos.append(_attr_obj_from_dict(cls, **info))
services = []
for service in restored.get("services", []):
services.append(UserService.from_dict(service))
return infos, services
async def async_save_to_store(self) -> None:
"""Generate dynamic data to store and save it to the filesystem."""
store_data = {"device_info": attr.asdict(self.device_info), "services": []}
for comp_type, infos in self.info.items():
store_data[comp_type] = [attr.asdict(info) for info in infos.values()]
for service in self.services.values():
store_data["services"].append(service.to_dict())
self.store.async_delay_save(lambda: store_data, SAVE_DELAY)
def _attr_obj_from_dict(cls, **kwargs):
return cls(**{key: kwargs[key] for key in attr.fields_dict(cls) if key in kwargs})
| tboyce021/home-assistant | homeassistant/components/esphome/entry_data.py | Python | apache-2.0 | 6,177 |
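# _attr_obj_from_dict() above exists so that dicts persisted by older versions
# (with extra or missing keys) can still hydrate attrs classes: it keeps only
# the declared fields. A toy check using a copy of the helper (demo class and
# values assumed):
import attr
@attr.s
class _Demo:
    name = attr.ib(default='')
def _from_dict(cls, **kwargs):
    return cls(**{key: kwargs[key] for key in attr.fields_dict(cls) if key in kwargs})
assert _from_dict(_Demo, name='kitchen', stale_key=1).name == 'kitchen'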
"""
Student Views
"""
import datetime
import feedparser
import json
import logging
import random
import re
import string # pylint: disable=W0402
import urllib
import uuid
import time
from django.conf import settings
from django.contrib.auth import logout, authenticate, login
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import password_reset_confirm
from django.core.cache import cache
from django.core.context_processors import csrf
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.core.validators import validate_email, validate_slug, ValidationError
from django.core.exceptions import ObjectDoesNotExist
from django.db import IntegrityError, transaction
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotAllowed, Http404
from django.shortcuts import redirect
from django_future.csrf import ensure_csrf_cookie
from django.utils.http import cookie_date
from django.utils.http import base36_to_int
from django.utils.translation import ugettext as _
from django.views.decorators.http import require_POST
from ratelimitbackend.exceptions import RateLimitException
from mitxmako.shortcuts import render_to_response, render_to_string
from bs4 import BeautifulSoup
from student.models import (Registration, UserProfile, TestCenterUser, TestCenterUserForm,
TestCenterRegistration, TestCenterRegistrationForm,
PendingNameChange, PendingEmailChange,
CourseEnrollment, unique_id_for_user,
get_testcenter_registration, CourseEnrollmentAllowed)
from student.forms import PasswordResetFormNoActive
from certificates.models import CertificateStatuses, certificate_status_for_student
from xmodule.course_module import CourseDescriptor
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.modulestore.django import modulestore
from collections import namedtuple
from courseware.courses import get_courses, sort_by_announcement
from courseware.access import has_access
from external_auth.models import ExternalAuthMap
from bulk_email.models import Optout
import track.views
from statsd import statsd
from pytz import UTC
log = logging.getLogger("mitx.student")
AUDIT_LOG = logging.getLogger("audit")
Article = namedtuple('Article', 'title url author image deck publication publish_date')
def csrf_token(context):
"""A csrf token that can be included in a form."""
csrf_token = context.get('csrf_token', '')
if csrf_token == 'NOTPROVIDED':
return ''
return (u'<div style="display:none"><input type="hidden"'
' name="csrfmiddlewaretoken" value="%s" /></div>' % (csrf_token))
# NOTE: This view is not linked to directly--it is called from
# branding/views.py:index(), which is cached for anonymous users.
# This means that it should always return the same thing for anon
# users. (in particular, no switching based on query params allowed)
def index(request, extra_context={}, user=None):
"""
Render the edX main page.
extra_context is used to allow immediate display of certain modal windows, eg signup,
as used by external_auth.
"""
# The course selection work is done in courseware.courses.
domain = settings.MITX_FEATURES.get('FORCE_UNIVERSITY_DOMAIN') # normally False
# do explicit check, because domain=None is valid
if domain == False:
domain = request.META.get('HTTP_HOST')
courses = get_courses(None, domain=domain)
courses = sort_by_announcement(courses)
context = {'courses': courses}
context.update(extra_context)
return render_to_response('index.html', context)
def course_from_id(course_id):
"""Return the CourseDescriptor corresponding to this course_id"""
course_loc = CourseDescriptor.id_to_location(course_id)
return modulestore().get_instance(course_id, course_loc)
day_pattern = re.compile(r'\s\d+,\s')
multimonth_pattern = re.compile(r'\s?\-\s?\S+\s')
def _get_date_for_press(publish_date):
# strip off extra months, and just use the first:
date = re.sub(multimonth_pattern, ", ", publish_date)
if re.search(day_pattern, date):
date = datetime.datetime.strptime(date, "%B %d, %Y").replace(tzinfo=UTC)
else:
date = datetime.datetime.strptime(date, "%B, %Y").replace(tzinfo=UTC)
return date
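# How the two patterns above normalise press dates (sample strings assumed):
# multimonth_pattern collapses a month range to its first month, then
# day_pattern decides which strptime format applies.
#   "March - April, 2013" -> "March, 2013" -> parsed with "%B, %Y"
#   "March 5, 2013"       -> parsed with "%B %d, %Y"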
def press(request):
json_articles = cache.get("student_press_json_articles")
if json_articles is None:
        if hasattr(settings, 'PRESS_URL'):
content = urllib.urlopen(settings.PRESS_URL).read()
json_articles = json.loads(content)
else:
content = open(settings.PROJECT_ROOT / "templates" / "press.json").read()
json_articles = json.loads(content)
cache.set("student_press_json_articles", json_articles)
articles = [Article(**article) for article in json_articles]
articles.sort(key=lambda item: _get_date_for_press(item.publish_date), reverse=True)
return render_to_response('static_templates/press.html', {'articles': articles})
def process_survey_link(survey_link, user):
"""
If {UNIQUE_ID} appears in the link, replace it with a unique id for the user.
Currently, this is sha1(user.username). Otherwise, return survey_link.
"""
return survey_link.format(UNIQUE_ID=unique_id_for_user(user))
def cert_info(user, course):
"""
Get the certificate info needed to render the dashboard section for the given
student and course. Returns a dictionary with keys:
'status': one of 'generating', 'ready', 'notpassing', 'processing', 'restricted'
'show_download_url': bool
'download_url': url, only present if show_download_url is True
'show_disabled_download_button': bool -- true if state is 'generating'
'show_survey_button': bool
'survey_url': url, only if show_survey_button is True
'grade': if status is not 'processing'
"""
if not course.has_ended():
return {}
return _cert_info(user, course, certificate_status_for_student(user, course.id))
def _cert_info(user, course, cert_status):
"""
Implements the logic for cert_info -- split out for testing.
"""
default_status = 'processing'
default_info = {'status': default_status,
'show_disabled_download_button': False,
'show_download_url': False,
'show_survey_button': False}
if cert_status is None:
return default_info
# simplify the status for the template using this lookup table
template_state = {
CertificateStatuses.generating: 'generating',
CertificateStatuses.regenerating: 'generating',
CertificateStatuses.downloadable: 'ready',
CertificateStatuses.notpassing: 'notpassing',
CertificateStatuses.restricted: 'restricted',
}
status = template_state.get(cert_status['status'], default_status)
d = {'status': status,
'show_download_url': status == 'ready',
'show_disabled_download_button': status == 'generating', }
if (status in ('generating', 'ready', 'notpassing', 'restricted') and
course.end_of_course_survey_url is not None):
d.update({
'show_survey_button': True,
'survey_url': process_survey_link(course.end_of_course_survey_url, user)})
else:
d['show_survey_button'] = False
if status == 'ready':
if 'download_url' not in cert_status:
log.warning("User %s has a downloadable cert for %s, but no download url",
user.username, course.id)
return default_info
else:
d['download_url'] = cert_status['download_url']
if status in ('generating', 'ready', 'notpassing', 'restricted'):
if 'grade' not in cert_status:
# Note: as of 11/20/2012, we know there are students in this state-- cs169.1x,
# who need to be regraded (we weren't tracking 'notpassing' at first).
# We can add a log.warning here once we think it shouldn't happen.
return default_info
else:
d['grade'] = cert_status['grade']
return d
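# A worked example of _cert_info() (inputs assumed; course with no
# end-of-course survey): a downloadable status with a URL collapses to the
# 'ready' template state:
#   _cert_info(user, course, {'status': 'downloadable',
#                             'download_url': 'http://cert', 'grade': '0.95'})
#   -> {'status': 'ready', 'show_download_url': True, 'download_url': 'http://cert',
#       'show_disabled_download_button': False, 'show_survey_button': False,
#       'grade': '0.95'}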
@ensure_csrf_cookie
def signin_user(request):
"""
This view will display the non-modal login form
"""
if request.user.is_authenticated():
return redirect(reverse('dashboard'))
context = {
'course_id': request.GET.get('course_id'),
'enrollment_action': request.GET.get('enrollment_action')
}
return render_to_response('login.html', context)
@ensure_csrf_cookie
def register_user(request, extra_context=None):
"""
This view will display the non-modal registration form
"""
if request.user.is_authenticated():
return redirect(reverse('dashboard'))
context = {
'course_id': request.GET.get('course_id'),
'enrollment_action': request.GET.get('enrollment_action')
}
if extra_context is not None:
context.update(extra_context)
return render_to_response('register.html', context)
@login_required
@ensure_csrf_cookie
def dashboard(request):
user = request.user
# Build our courses list for the user, but ignore any courses that no longer
# exist (because the course IDs have changed). Still, we don't delete those
# enrollments, because it could have been a data push snafu.
courses = []
for enrollment in CourseEnrollment.enrollments_for_user(user):
try:
courses.append(course_from_id(enrollment.course_id))
except ItemNotFoundError:
log.error("User {0} enrolled in non-existent course {1}"
.format(user.username, enrollment.course_id))
course_optouts = Optout.objects.filter(user=user).values_list('course_id', flat=True)
message = ""
if not user.is_active:
message = render_to_string('registration/activate_account_notice.html', {'email': user.email})
# Global staff can see what courses errored on their dashboard
staff_access = False
errored_courses = {}
if has_access(user, 'global', 'staff'):
# Show any courses that errored on load
staff_access = True
errored_courses = modulestore().get_errored_courses()
show_courseware_links_for = frozenset(course.id for course in courses
if has_access(request.user, course, 'load'))
cert_statuses = {course.id: cert_info(request.user, course) for course in courses}
exam_registrations = {course.id: exam_registration_info(request.user, course) for course in courses}
# get info w.r.t ExternalAuthMap
external_auth_map = None
try:
external_auth_map = ExternalAuthMap.objects.get(user=user)
except ExternalAuthMap.DoesNotExist:
pass
context = {'courses': courses,
'course_optouts': course_optouts,
'message': message,
'external_auth_map': external_auth_map,
'staff_access': staff_access,
'errored_courses': errored_courses,
'show_courseware_links_for': show_courseware_links_for,
'cert_statuses': cert_statuses,
'exam_registrations': exam_registrations,
}
return render_to_response('dashboard.html', context)
def try_change_enrollment(request):
"""
This method calls change_enrollment if the necessary POST
parameters are present, but does not return anything. It
simply logs the result or exception. This is usually
called after a registration or login, as secondary action.
It should not interrupt a successful registration or login.
"""
if 'enrollment_action' in request.POST:
try:
enrollment_response = change_enrollment(request)
# There isn't really a way to display the results to the user, so we just log it
# We expect the enrollment to be a success, and will show up on the dashboard anyway
log.info(
"Attempted to automatically enroll after login. Response code: {0}; response body: {1}".format(
enrollment_response.status_code,
enrollment_response.content
)
)
except Exception, e:
log.exception("Exception automatically enrolling after login: {0}".format(str(e)))
def change_enrollment(request):
"""
Modify the enrollment status for the logged-in user.
The request parameter must be a POST request (other methods return 405)
that specifies course_id and enrollment_action parameters. If course_id or
enrollment_action is not specified, if course_id is not valid, if
enrollment_action is something other than "enroll" or "unenroll", if
enrollment_action is "enroll" and enrollment is closed for the course, or
if enrollment_action is "unenroll" and the user is not enrolled in the
course, a 400 error will be returned. If the user is not logged in, 403
will be returned; it is important that only this case return 403 so the
front end can redirect the user to a registration or login page when this
happens. This function should only be called from an AJAX request or
as a post-login/registration helper, so the error messages in the responses
should never actually be user-visible.
"""
if request.method != "POST":
return HttpResponseNotAllowed(["POST"])
user = request.user
if not user.is_authenticated():
return HttpResponseForbidden()
action = request.POST.get("enrollment_action")
course_id = request.POST.get("course_id")
if course_id is None:
return HttpResponseBadRequest(_("Course id not specified"))
if action == "enroll":
# Make sure the course exists
# We don't do this check on unenroll, or a bad course id can't be unenrolled from
try:
course = course_from_id(course_id)
except ItemNotFoundError:
log.warning("User {0} tried to enroll in non-existent course {1}"
.format(user.username, course_id))
return HttpResponseBadRequest(_("Course id is invalid"))
if not has_access(user, course, 'enroll'):
return HttpResponseBadRequest(_("Enrollment is closed"))
org, course_num, run = course_id.split("/")
statsd.increment("common.student.enrollment",
tags=["org:{0}".format(org),
"course:{0}".format(course_num),
"run:{0}".format(run)])
CourseEnrollment.enroll(user, course.id)
return HttpResponse()
elif action == "unenroll":
try:
CourseEnrollment.unenroll(user, course_id)
org, course_num, run = course_id.split("/")
statsd.increment("common.student.unenrollment",
tags=["org:{0}".format(org),
"course:{0}".format(course_num),
"run:{0}".format(run)])
return HttpResponse()
except CourseEnrollment.DoesNotExist:
return HttpResponseBadRequest(_("You are not enrolled in this course"))
else:
return HttpResponseBadRequest(_("Enrollment action is invalid"))
@ensure_csrf_cookie
def accounts_login(request, error=""):
return render_to_response('login.html', {'error': error})
# Need different levels of logging
@ensure_csrf_cookie
def login_user(request, error=""):
"""AJAX request to log in the user."""
if 'email' not in request.POST or 'password' not in request.POST:
return HttpResponse(json.dumps({'success': False,
'value': _('There was an error receiving your login information. Please email us.')})) # TODO: User error message
email = request.POST['email']
password = request.POST['password']
try:
user = User.objects.get(email=email)
except User.DoesNotExist:
AUDIT_LOG.warning(u"Login failed - Unknown user email: {0}".format(email))
user = None
# if the user doesn't exist, we want to set the username to an invalid
# username so that authentication is guaranteed to fail and we can take
# advantage of the ratelimited backend
username = user.username if user else ""
try:
user = authenticate(username=username, password=password, request=request)
# this occurs when there are too many attempts from the same IP address
except RateLimitException:
return HttpResponse(json.dumps({'success': False,
'value': _('Too many failed login attempts. Try again later.')}))
if user is None:
# if we didn't find this username earlier, the account for this email
# doesn't exist, and doesn't have a corresponding password
if username != "":
AUDIT_LOG.warning(u"Login failed - password for {0} is invalid".format(email))
return HttpResponse(json.dumps({'success': False,
'value': _('Email or password is incorrect.')}))
if user is not None and user.is_active:
try:
# We do not log here, because we have a handler registered
# to perform logging on successful logins.
login(request, user)
if request.POST.get('remember') == 'true':
request.session.set_expiry(604800)
log.debug("Setting user session to never expire")
else:
request.session.set_expiry(0)
except Exception as e:
AUDIT_LOG.critical("Login failed - Could not create session. Is memcached running?")
log.critical("Login failed - Could not create session. Is memcached running?")
log.exception(e)
raise
try_change_enrollment(request)
statsd.increment("common.student.successful_login")
response = HttpResponse(json.dumps({'success': True}))
# set the login cookie for the edx marketing site
# we want this cookie to be accessed via javascript
# so httponly is set to None
if request.session.get_expire_at_browser_close():
max_age = None
expires = None
else:
max_age = request.session.get_expiry_age()
expires_time = time.time() + max_age
expires = cookie_date(expires_time)
response.set_cookie(settings.EDXMKTG_COOKIE_NAME,
'true', max_age=max_age,
expires=expires, domain=settings.SESSION_COOKIE_DOMAIN,
path='/',
secure=None,
httponly=None)
return response
AUDIT_LOG.warning(u"Login failed - Account not active for user {0}, resending activation".format(username))
reactivation_email_for_user(user)
not_activated_msg = _("This account has not been activated. We have sent another activation message. Please check your e-mail for the activation instructions.")
return HttpResponse(json.dumps({'success': False,
'value': not_activated_msg}))
@ensure_csrf_cookie
def logout_user(request):
"""
HTTP request to log out the user. Redirects to marketing page.
Deletes both the CSRF and sessionid cookies so the marketing
site can determine the logged in state of the user
"""
# We do not log here, because we have a handler registered
# to perform logging on successful logouts.
logout(request)
response = redirect('/')
response.delete_cookie(settings.EDXMKTG_COOKIE_NAME,
path='/',
domain=settings.SESSION_COOKIE_DOMAIN)
return response
@login_required
@ensure_csrf_cookie
def change_setting(request):
"""JSON call to change a profile setting: Right now, location"""
# TODO (vshnayder): location is no longer used
up = UserProfile.objects.get(user=request.user) # request.user.profile_cache
if 'location' in request.POST:
up.location = request.POST['location']
up.save()
return HttpResponse(json.dumps({'success': True,
'location': up.location, }))
def _do_create_account(post_vars):
"""
Given cleaned post variables, create the User and UserProfile objects, as well as the
registration for this user.
Returns a tuple (User, UserProfile, Registration).
Note: this function is also used for creating test users.
"""
user = User(username=post_vars['username'],
email=post_vars['email'],
is_active=False)
user.set_password(post_vars['password'])
registration = Registration()
# TODO: Rearrange so that if part of the process fails, the whole process fails.
# Right now, we can have e.g. no registration e-mail sent out and a zombie account
try:
user.save()
except IntegrityError:
js = {'success': False}
# Figure out the cause of the integrity error
if len(User.objects.filter(username=post_vars['username'])) > 0:
js['value'] = _("An account with the Public Username '{username}' already exists.").format(username=post_vars['username'])
js['field'] = 'username'
return HttpResponse(json.dumps(js))
if len(User.objects.filter(email=post_vars['email'])) > 0:
js['value'] = _("An account with the Email '{email}' already exists.").format(email=post_vars['email'])
js['field'] = 'email'
return HttpResponse(json.dumps(js))
raise
registration.register(user)
profile = UserProfile(user=user)
profile.name = post_vars['name']
profile.level_of_education = post_vars.get('level_of_education')
profile.gender = post_vars.get('gender')
profile.mailing_address = post_vars.get('mailing_address')
profile.goals = post_vars.get('goals')
try:
profile.year_of_birth = int(post_vars['year_of_birth'])
except (ValueError, KeyError):
# If they give us garbage, just ignore it instead
# of asking them to put an integer.
profile.year_of_birth = None
try:
profile.save()
except Exception:
log.exception("UserProfile creation failed for user {id}.".format(id=user.id))
return (user, profile, registration)
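# The TODO above (make account creation all-or-nothing) would typically be
# addressed by wrapping the saves in one database transaction; a sketch only,
# and transaction.atomic is a newer-Django spelling than this codebase uses:
#   with transaction.atomic():
#       user.save()
#       registration.register(user)
#       profile.save()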
@ensure_csrf_cookie
def create_account(request, post_override=None):
"""
JSON call to create new edX account.
Used by form in signup_modal.html, which is included into navigation.html
"""
js = {'success': False}
post_vars = post_override if post_override else request.POST
# if doing signup for an external authorization, then get email, password, name from the eamap
# don't use the ones from the form, since the user could have hacked those
# unless originally we didn't get a valid email or name from the external auth
DoExternalAuth = 'ExternalAuthMap' in request.session
if DoExternalAuth:
eamap = request.session['ExternalAuthMap']
try:
validate_email(eamap.external_email)
email = eamap.external_email
except ValidationError:
email = post_vars.get('email', '')
if eamap.external_name.strip() == '':
name = post_vars.get('name', '')
else:
name = eamap.external_name
password = eamap.internal_password
post_vars = dict(post_vars.items())
post_vars.update(dict(email=email, name=name, password=password))
log.debug(u'In create_account with external_auth: user = %s, email=%s', name, email)
# Confirm we have a properly formed request
for a in ['username', 'email', 'password', 'name']:
if a not in post_vars:
js['value'] = _("Error (401 {field}). E-mail us.").format(field=a)
js['field'] = a
return HttpResponse(json.dumps(js))
if post_vars.get('honor_code', 'false') != u'true':
js['value'] = _("To enroll, you must follow the honor code.").format(field=a)
js['field'] = 'honor_code'
return HttpResponse(json.dumps(js))
# Can't have terms of service for certain SHIB users, like at Stanford
tos_not_required = settings.MITX_FEATURES.get("AUTH_USE_SHIB") \
and settings.MITX_FEATURES.get('SHIB_DISABLE_TOS') \
and DoExternalAuth and ("shib" in eamap.external_domain)
if not tos_not_required:
if post_vars.get('terms_of_service', 'false') != u'true':
js['value'] = _("You must accept the terms of service.").format(field=a)
js['field'] = 'terms_of_service'
return HttpResponse(json.dumps(js))
# Confirm appropriate fields are there.
# TODO: Check e-mail format is correct.
# TODO: Confirm e-mail is not from a generic domain (mailinator, etc.)? Not sure if
# this is a good idea
# TODO: Check password is sane
required_post_vars = ['username', 'email', 'name', 'password', 'terms_of_service', 'honor_code']
if tos_not_required:
required_post_vars = ['username', 'email', 'name', 'password', 'honor_code']
for a in required_post_vars:
if len(post_vars[a]) < 2:
            error_str = {'username': _('Username must be minimum of two characters long.'),
                         'email': _('A properly formatted e-mail is required.'),
                         'name': _('Your legal name must be a minimum of two characters long.'),
                         'password': _('A valid password is required.'),
                         'terms_of_service': _('Accepting Terms of Service is required.'),
                         'honor_code': _('Agreeing to the Honor Code is required.')}
js['value'] = error_str[a]
js['field'] = a
return HttpResponse(json.dumps(js))
try:
validate_email(post_vars['email'])
except ValidationError:
js['value'] = _("Valid e-mail is required.").format(field=a)
js['field'] = 'email'
return HttpResponse(json.dumps(js))
try:
validate_slug(post_vars['username'])
except ValidationError:
js['value'] = _("Username should only consist of A-Z and 0-9, with no spaces.").format(field=a)
js['field'] = 'username'
return HttpResponse(json.dumps(js))
# Ok, looks like everything is legit. Create the account.
ret = _do_create_account(post_vars)
if isinstance(ret, HttpResponse): # if there was an error then return that
return ret
(user, profile, registration) = ret
d = {'name': post_vars['name'],
'key': registration.activation_key,
}
# composes activation email
subject = render_to_string('emails/activation_email_subject.txt', d)
# Email subject *must not* contain newlines
subject = ''.join(subject.splitlines())
message = render_to_string('emails/activation_email.txt', d)
    # don't send email if we are doing load testing or random user generation for some reason
    if not settings.MITX_FEATURES.get('AUTOMATIC_AUTH_FOR_TESTING'):
try:
if settings.MITX_FEATURES.get('REROUTE_ACTIVATION_EMAIL'):
dest_addr = settings.MITX_FEATURES['REROUTE_ACTIVATION_EMAIL']
message = ("Activation for %s (%s): %s\n" % (user, user.email, profile.name) +
'-' * 80 + '\n\n' + message)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, [dest_addr], fail_silently=False)
else:
_res = user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
        except Exception:
log.warning('Unable to send activation email to user', exc_info=True)
js['value'] = _('Could not send activation e-mail.')
return HttpResponse(json.dumps(js))
# Immediately after a user creates an account, we log them in. They are only
# logged in until they close the browser. They can't log in again until they click
# the activation link from the email.
login_user = authenticate(username=post_vars['username'], password=post_vars['password'])
login(request, login_user)
request.session.set_expiry(0)
# TODO: there is no error checking here to see that the user actually logged in successfully,
# and is not yet an active user.
if login_user is not None:
AUDIT_LOG.info(u"Login success on new account creation - {0}".format(login_user.username))
if DoExternalAuth:
eamap.user = login_user
eamap.dtsignup = datetime.datetime.now(UTC)
eamap.save()
AUDIT_LOG.info("User registered with external_auth %s", post_vars['username'])
AUDIT_LOG.info('Updated ExternalAuthMap for %s to be %s', post_vars['username'], eamap)
if settings.MITX_FEATURES.get('BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH'):
log.info('bypassing activation email')
login_user.is_active = True
login_user.save()
AUDIT_LOG.info(u"Login activated on extauth account - {0} ({1})".format(login_user.username, login_user.email))
try_change_enrollment(request)
statsd.increment("common.student.account_created")
    js = {'success': True}
    response = HttpResponse(json.dumps(js))
# set the login cookie for the edx marketing site
# we want this cookie to be accessed via javascript
# so httponly is set to None
if request.session.get_expire_at_browser_close():
max_age = None
expires = None
else:
max_age = request.session.get_expiry_age()
expires_time = time.time() + max_age
expires = cookie_date(expires_time)
response.set_cookie(settings.EDXMKTG_COOKIE_NAME,
'true', max_age=max_age,
expires=expires, domain=settings.SESSION_COOKIE_DOMAIN,
path='/',
secure=None,
httponly=None)
return response
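# Request/response sketch (hypothetical client call; field values are
# illustrative): the signup form POSTs at least username, email, password,
# name and honor_code (plus terms_of_service unless the SHIB exemption
# applies), and receives JSON back:
#
#     POST username=jdoe&email=jdoe@example.com&password=s3cret
#          &name=Jane+Doe&honor_code=true&terms_of_service=true
#     -> {"success": true}
#     -> {"success": false, "field": "email", "value": "..."}  on failure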
def exam_registration_info(user, course):
""" Returns a Registration object if the user is currently registered for a current
exam of the course. Returns None if the user is not registered, or if there is no
current exam for the course.
"""
exam_info = course.current_test_center_exam
if exam_info is None:
return None
exam_code = exam_info.exam_series_code
registrations = get_testcenter_registration(user, course.id, exam_code)
if registrations:
registration = registrations[0]
else:
registration = None
return registration
@login_required
@ensure_csrf_cookie
def begin_exam_registration(request, course_id):
""" Handles request to register the user for the current
test center exam of the specified course. Called by form
in dashboard.html.
"""
user = request.user
try:
course = course_from_id(course_id)
except ItemNotFoundError:
log.error("User {0} enrolled in non-existent course {1}".format(user.username, course_id))
raise Http404
# get the exam to be registered for:
# (For now, we just assume there is one at most.)
# if there is no exam now (because someone bookmarked this stupid page),
# then return a 404:
exam_info = course.current_test_center_exam
if exam_info is None:
raise Http404
# determine if the user is registered for this course:
registration = exam_registration_info(user, course)
# we want to populate the registration page with the relevant information,
# if it already exists. Create an empty object otherwise.
try:
testcenteruser = TestCenterUser.objects.get(user=user)
except TestCenterUser.DoesNotExist:
testcenteruser = TestCenterUser()
testcenteruser.user = user
context = {'course': course,
'user': user,
'testcenteruser': testcenteruser,
'registration': registration,
'exam_info': exam_info,
}
return render_to_response('test_center_register.html', context)
@ensure_csrf_cookie
def create_exam_registration(request, post_override=None):
"""
JSON call to create a test center exam registration.
Called by form in test_center_register.html
"""
post_vars = post_override if post_override else request.POST
# first determine if we need to create a new TestCenterUser, or if we are making any update
# to an existing TestCenterUser.
username = post_vars['username']
user = User.objects.get(username=username)
course_id = post_vars['course_id']
course = course_from_id(course_id) # assume it will be found....
# make sure that any demographic data values received from the page have been stripped.
# Whitespace is not an acceptable response for any of these values
demographic_data = {}
for fieldname in TestCenterUser.user_provided_fields():
if fieldname in post_vars:
demographic_data[fieldname] = (post_vars[fieldname]).strip()
try:
testcenter_user = TestCenterUser.objects.get(user=user)
needs_updating = testcenter_user.needs_update(demographic_data)
log.info("User {0} enrolled in course {1} {2}updating demographic info for exam registration".format(user.username, course_id, "" if needs_updating else "not "))
except TestCenterUser.DoesNotExist:
# do additional initialization here:
testcenter_user = TestCenterUser.create(user)
needs_updating = True
log.info("User {0} enrolled in course {1} creating demographic info for exam registration".format(user.username, course_id))
# perform validation:
if needs_updating:
# first perform validation on the user information
# using a Django Form.
form = TestCenterUserForm(instance=testcenter_user, data=demographic_data)
if form.is_valid():
form.update_and_save()
else:
response_data = {'success': False}
# return a list of errors...
response_data['field_errors'] = form.errors
response_data['non_field_errors'] = form.non_field_errors()
return HttpResponse(json.dumps(response_data), mimetype="application/json")
# create and save the registration:
needs_saving = False
exam = course.current_test_center_exam
exam_code = exam.exam_series_code
registrations = get_testcenter_registration(user, course_id, exam_code)
if registrations:
registration = registrations[0]
# NOTE: we do not bother to check here to see if the registration has changed,
# because at the moment there is no way for a user to change anything about their
# registration. They only provide an optional accommodation request once, and
# cannot make changes to it thereafter.
# It is possible that the exam_info content has been changed, such as the
# scheduled exam dates, but those kinds of changes should not be handled through
# this registration screen.
else:
accommodation_request = post_vars.get('accommodation_request', '')
registration = TestCenterRegistration.create(testcenter_user, exam, accommodation_request)
needs_saving = True
log.info("User {0} enrolled in course {1} creating new exam registration".format(user.username, course_id))
if needs_saving:
# do validation of registration. (Mainly whether an accommodation request is too long.)
form = TestCenterRegistrationForm(instance=registration, data=post_vars)
if form.is_valid():
form.update_and_save()
else:
response_data = {'success': False}
# return a list of errors...
response_data['field_errors'] = form.errors
response_data['non_field_errors'] = form.non_field_errors()
return HttpResponse(json.dumps(response_data), mimetype="application/json")
# only do the following if there is accommodation text to send,
# and a destination to which to send it.
# TODO: still need to create the accommodation email templates
# if 'accommodation_request' in post_vars and 'TESTCENTER_ACCOMMODATION_REQUEST_EMAIL' in settings:
# d = {'accommodation_request': post_vars['accommodation_request'] }
#
# # composes accommodation email
# subject = render_to_string('emails/accommodation_email_subject.txt', d)
# # Email subject *must not* contain newlines
# subject = ''.join(subject.splitlines())
# message = render_to_string('emails/accommodation_email.txt', d)
#
# try:
# dest_addr = settings['TESTCENTER_ACCOMMODATION_REQUEST_EMAIL']
# from_addr = user.email
# send_mail(subject, message, from_addr, [dest_addr], fail_silently=False)
# except:
# log.exception(sys.exc_info())
# response_data = {'success': False}
# response_data['non_field_errors'] = [ 'Could not send accommodation e-mail.', ]
# return HttpResponse(json.dumps(response_data), mimetype="application/json")
js = {'success': True}
return HttpResponse(json.dumps(js), mimetype="application/json")
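# Response sketch: on success this view answers {"success": true}; form
# validation problems come back as {"success": false, "field_errors": {...},
# "non_field_errors": [...]} built from the Django forms above.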
def auto_auth(request):
"""
    Automatically logs the user in with randomly generated credentials.
    This view is only accessible when
    settings.MITX_FEATURES['AUTOMATIC_AUTH_FOR_TESTING'] is true.
"""
def get_dummy_post_data(username, password, email, name):
"""
Return a dictionary suitable for passing to post_vars of _do_create_account or post_override
of create_account, with specified values.
"""
return {'username': username,
'email': email,
'password': password,
'name': name,
'honor_code': u'true',
'terms_of_service': u'true', }
# generate random user credentials from a small name space (determined by settings)
name_base = 'USER_'
pass_base = 'PASS_'
max_users = settings.MITX_FEATURES.get('MAX_AUTO_AUTH_USERS', 200)
number = random.randint(1, max_users)
# Get the params from the request to override default user attributes if specified
qdict = request.GET
# Use the params from the request, otherwise use these defaults
username = qdict.get('username', name_base + str(number))
password = qdict.get('password', pass_base + str(number))
email = qdict.get('email', '%s_dummy_test@mitx.mit.edu' % username)
name = qdict.get('name', '%s Test' % username)
# if they already are a user, log in
try:
user = User.objects.get(username=username)
user = authenticate(username=username, password=password, request=request)
login(request, user)
# else create and activate account info
except ObjectDoesNotExist:
post_override = get_dummy_post_data(username, password, email, name)
create_account(request, post_override=post_override)
request.user.is_active = True
request.user.save()
# return empty success
return HttpResponse('')
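# Usage sketch (testing only; requires MITX_FEATURES['AUTOMATIC_AUTH_FOR_TESTING'];
# the /auto_auth route shown is hypothetical and depends on urls.py):
#
#     GET /auto_auth                      -> logs in a random USER_n account
#     GET /auto_auth?username=smoke&password=pw&email=smoke@example.com
#                                         -> deterministic test account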
@ensure_csrf_cookie
def activate_account(request, key):
"""When link in activation e-mail is clicked"""
r = Registration.objects.filter(activation_key=key)
if len(r) == 1:
user_logged_in = request.user.is_authenticated()
already_active = True
if not r[0].user.is_active:
r[0].activate()
already_active = False
# Enroll student in any pending courses he/she may have if auto_enroll flag is set
student = User.objects.filter(id=r[0].user_id)
if student:
ceas = CourseEnrollmentAllowed.objects.filter(email=student[0].email)
for cea in ceas:
if cea.auto_enroll:
CourseEnrollment.enroll(student[0], cea.course_id)
resp = render_to_response(
"registration/activation_complete.html",
{
'user_logged_in': user_logged_in,
'already_active': already_active
}
)
return resp
if len(r) == 0:
return render_to_response(
"registration/activation_invalid.html",
{'csrf': csrf(request)['csrf_token']}
)
return HttpResponse(_("Unknown error. Please e-mail us to let us know how it happened."))
@ensure_csrf_cookie
def password_reset(request):
""" Attempts to send a password reset e-mail. """
if request.method != "POST":
raise Http404
form = PasswordResetFormNoActive(request.POST)
if form.is_valid():
form.save(use_https=request.is_secure(),
from_email=settings.DEFAULT_FROM_EMAIL,
request=request,
domain_override=request.get_host())
return HttpResponse(json.dumps({'success': True,
'value': render_to_string('registration/password_reset_done.html', {})}))
else:
return HttpResponse(json.dumps({'success': False,
'error': _('Invalid e-mail or user')}))
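# Response sketch: a valid form yields {"success": true, "value": "<rendered
# password_reset_done.html>"}; anything else yields
# {"success": false, "error": "Invalid e-mail or user"}.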
def password_reset_confirm_wrapper(
request,
uidb36=None,
token=None,
):
""" A wrapper around django.contrib.auth.views.password_reset_confirm.
Needed because we want to set the user as active at this step.
"""
# cribbed from django.contrib.auth.views.password_reset_confirm
try:
uid_int = base36_to_int(uidb36)
user = User.objects.get(id=uid_int)
user.is_active = True
user.save()
except (ValueError, User.DoesNotExist):
pass
# we also want to pass settings.PLATFORM_NAME in as extra_context
extra_context = {"platform_name": settings.PLATFORM_NAME}
return password_reset_confirm(
request, uidb36=uidb36, token=token, extra_context=extra_context
)
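# Wiring sketch (hypothetical urls.py entry -- the actual pattern lives in the
# project's URL configuration and may differ):
#
#     url(r'^password_reset_confirm/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
#         'student.views.password_reset_confirm_wrapper',
#         name='password_reset_confirm'),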
def reactivation_email_for_user(user):
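    """Send a fresh activation e-mail to the given user; returns a JSON HttpResponse."""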
try:
reg = Registration.objects.get(user=user)
except Registration.DoesNotExist:
return HttpResponse(json.dumps({'success': False,
'error': _('No inactive user with this e-mail exists')}))
d = {'name': user.profile.name,
'key': reg.activation_key}
subject = render_to_string('emails/activation_email_subject.txt', d)
subject = ''.join(subject.splitlines())
message = render_to_string('emails/activation_email.txt', d)
try:
_res = user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
    except Exception:
log.warning('Unable to send reactivation email', exc_info=True)
return HttpResponse(json.dumps({'success': False, 'error': _('Unable to send reactivation email')}))
return HttpResponse(json.dumps({'success': True}))
@ensure_csrf_cookie
def change_email_request(request):
""" AJAX call from the profile page. User wants a new e-mail.
"""
## Make sure it checks for existing e-mail conflicts
    if not request.user.is_authenticated():
raise Http404
user = request.user
if not user.check_password(request.POST['password']):
return HttpResponse(json.dumps({'success': False,
'error': _('Invalid password')}))
new_email = request.POST['new_email']
try:
validate_email(new_email)
except ValidationError:
return HttpResponse(json.dumps({'success': False,
'error': _('Valid e-mail address required.')}))
if User.objects.filter(email=new_email).count() != 0:
## CRITICAL TODO: Handle case sensitivity for e-mails
return HttpResponse(json.dumps({'success': False,
'error': _('An account with this e-mail already exists.')}))
pec_list = PendingEmailChange.objects.filter(user=request.user)
if len(pec_list) == 0:
pec = PendingEmailChange()
pec.user = user
else:
pec = pec_list[0]
    pec.new_email = new_email
pec.activation_key = uuid.uuid4().hex
pec.save()
if pec.new_email == user.email:
pec.delete()
return HttpResponse(json.dumps({'success': False,
'error': _('Old email is the same as the new email.')}))
d = {'key': pec.activation_key,
'old_email': user.email,
'new_email': pec.new_email}
subject = render_to_string('emails/email_change_subject.txt', d)
subject = ''.join(subject.splitlines())
message = render_to_string('emails/email_change.txt', d)
_res = send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, [pec.new_email])
return HttpResponse(json.dumps({'success': True}))
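# AJAX contract sketch (hypothetical values): the profile page POSTs the
# current password plus the desired address and receives JSON:
#
#     POST password=s3cret&new_email=new@example.com
#     -> {"success": true}                                (confirmation sent)
#     -> {"success": false, "error": "Invalid password"}  (and similar errors)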
@ensure_csrf_cookie
@transaction.commit_manually
def confirm_email_change(request, key):
""" User requested a new e-mail. This is called when the activation
    link is clicked. We confirm with the old e-mail, and update the address.
    """
try:
try:
pec = PendingEmailChange.objects.get(activation_key=key)
except PendingEmailChange.DoesNotExist:
transaction.rollback()
return render_to_response("invalid_email_key.html", {})
user = pec.user
address_context = {
'old_email': user.email,
'new_email': pec.new_email
}
if len(User.objects.filter(email=pec.new_email)) != 0:
transaction.rollback()
return render_to_response("email_exists.html", {})
subject = render_to_string('emails/email_change_subject.txt', address_context)
subject = ''.join(subject.splitlines())
message = render_to_string('emails/confirm_email_change.txt', address_context)
up = UserProfile.objects.get(user=user)
meta = up.get_meta()
if 'old_emails' not in meta:
meta['old_emails'] = []
meta['old_emails'].append([user.email, datetime.datetime.now(UTC).isoformat()])
up.set_meta(meta)
up.save()
# Send it to the old email...
try:
user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
except Exception:
transaction.rollback()
log.warning('Unable to send confirmation email to old address', exc_info=True)
return render_to_response("email_change_failed.html", {'email': user.email})
user.email = pec.new_email
user.save()
pec.delete()
# And send it to the new email...
try:
user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
except Exception:
transaction.rollback()
log.warning('Unable to send confirmation email to new address', exc_info=True)
return render_to_response("email_change_failed.html", {'email': pec.new_email})
transaction.commit()
return render_to_response("email_change_successful.html", address_context)
except Exception:
# If we get an unexpected exception, be sure to rollback the transaction
transaction.rollback()
raise
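# Design note: commit_manually ensures the database changes (profile meta,
# address swap, PendingEmailChange deletion) only persist if both confirmation
# e-mails go out -- every early return above is preceded by an explicit
# rollback(), and the commit happens only after the second message is sent.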
@ensure_csrf_cookie
def change_name_request(request):
""" Log a request for a new name. """
    if not request.user.is_authenticated():
raise Http404
try:
pnc = PendingNameChange.objects.get(user=request.user)
except PendingNameChange.DoesNotExist:
pnc = PendingNameChange()
pnc.user = request.user
pnc.new_name = request.POST['new_name']
pnc.rationale = request.POST['rationale']
if len(pnc.new_name) < 2:
return HttpResponse(json.dumps({'success': False, 'error': _('Name required')}))
pnc.save()
# The following automatically accepts name change requests. Remove this to
# go back to the old system where it gets queued up for admin approval.
accept_name_change_by_id(pnc.id)
return HttpResponse(json.dumps({'success': True}))
@ensure_csrf_cookie
def pending_name_changes(request):
""" Web page which allows staff to approve or reject name changes. """
if not request.user.is_staff:
raise Http404
changes = list(PendingNameChange.objects.all())
js = {'students': [{'new_name': c.new_name,
'rationale': c.rationale,
'old_name': UserProfile.objects.get(user=c.user).name,
'email': c.user.email,
'uid': c.user.id,
'cid': c.id} for c in changes]}
return render_to_response('name_changes.html', js)
@ensure_csrf_cookie
def reject_name_change(request):
""" JSON: Name change process. Course staff clicks 'reject' on a given name change """
if not request.user.is_staff:
raise Http404
try:
pnc = PendingNameChange.objects.get(id=int(request.POST['id']))
except PendingNameChange.DoesNotExist:
return HttpResponse(json.dumps({'success': False, 'error': _('Invalid ID')}))
pnc.delete()
return HttpResponse(json.dumps({'success': True}))
def accept_name_change_by_id(nc_id):
    try:
        pnc = PendingNameChange.objects.get(id=nc_id)
except PendingNameChange.DoesNotExist:
return HttpResponse(json.dumps({'success': False, 'error': _('Invalid ID')}))
u = pnc.user
up = UserProfile.objects.get(user=u)
# Save old name
meta = up.get_meta()
if 'old_names' not in meta:
meta['old_names'] = []
meta['old_names'].append([up.name, pnc.rationale, datetime.datetime.now(UTC).isoformat()])
up.set_meta(meta)
up.name = pnc.new_name
up.save()
pnc.delete()
return HttpResponse(json.dumps({'success': True}))
@ensure_csrf_cookie
def accept_name_change(request):
""" JSON: Name change process. Course staff clicks 'accept' on a given name change
We used this during the prototype but now we simply record name changes instead
of manually approving them. Still keeping this around in case we want to go
back to this approval method.
"""
if not request.user.is_staff:
raise Http404
return accept_name_change_by_id(int(request.POST['id']))
@require_POST
@login_required
@ensure_csrf_cookie
def change_email_settings(request):
"""Modify logged-in user's setting for receiving emails from a course."""
user = request.user
course_id = request.POST.get("course_id")
receive_emails = request.POST.get("receive_emails")
if receive_emails:
optout_object = Optout.objects.filter(user=user, course_id=course_id)
if optout_object:
optout_object.delete()
log.info(u"User {0} ({1}) opted in to receive emails from course {2}".format(user.username, user.email, course_id))
track.views.server_track(request, "change-email-settings", {"receive_emails": "yes", "course": course_id}, page='dashboard')
else:
Optout.objects.get_or_create(user=user, course_id=course_id)
log.info(u"User {0} ({1}) opted out of receiving emails from course {2}".format(user.username, user.email, course_id))
track.views.server_track(request, "change-email-settings", {"receive_emails": "no", "course": course_id}, page='dashboard')
return HttpResponse(json.dumps({'success': True}))
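# Request sketch (hypothetical values): the dashboard POSTs a course_id and,
# to opt in, any non-empty receive_emails flag; omitting the flag records an
# Optout row instead:
#
#     POST course_id=MITx/6.002x/2013_Spring&receive_emails=on
#     -> {"success": true}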
| pdehaye/theming-edx-platform | common/djangoapps/student/views.py | Python | agpl-3.0 | 51,566 |